Merge branch 'master' into index-lifecycle

Colin Goodheart-Smithe 2018-11-02 10:56:35 +00:00
commit fc6e1f7f3f
GPG Key ID: F975E7BDD739B3C7
45 changed files with 273 additions and 267 deletions

View File

@ -229,34 +229,6 @@ Pass arbitrary jvm arguments.
./gradlew test -Dtests.jvm.argline="-Djava.security.debug=access,failure"
------------------------------
== Backwards Compatibility Tests
Running backwards compatibility tests is disabled by default since it
requires a release version of elasticsearch to be present on the test system.
To run backwards compatibility tests untar or unzip a release and run the tests
with the following command:
---------------------------------------------------------------------------
./gradlew test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.bwc.path=/path/to/elasticsearch -Dtests.security.manager=false
---------------------------------------------------------------------------
Note that backwards tests must be run with security manager disabled.
If the elasticsearch release is placed under `./backwards/elasticsearch-x.y.z` the path
can be omitted:
---------------------------------------------------------------------------
./gradlew test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.security.manager=false
---------------------------------------------------------------------------
To setup the bwc test environment execute the following steps (provided you are
already in your elasticsearch clone):
---------------------------------------------------------------------------
$ mkdir backwards && cd backwards
$ curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.2.1.tar.gz
$ tar -xzf elasticsearch-1.2.1.tar.gz
---------------------------------------------------------------------------
== Running verification tasks
To run all verification tasks, including static checks, unit tests, and integration tests:
@ -554,25 +526,28 @@ environment variable.
== Testing backwards compatibility
Backwards compatibility tests exist to test upgrading from each supported version
to the current version. To run all backcompat tests use:
to the current version. To run them all use:
-------------------------------------------------
./gradlew bwcTest
-------------------------------------------------
A specific version can be tested as well. For example, to test backcompat with
A specific version can be tested as well. For example, to test bwc with
version 5.3.2 run:
-------------------------------------------------
./gradlew v5.3.2#bwcTest
-------------------------------------------------
When running `./gradlew check`, some minimal backcompat checks are run. Which version
is tested depends on the branch. On master, this will test against the current
stable branch. On the stable branch, it will test against the latest release
branch. Finally, on a release branch, it will test against the most recent release.
Tests are run for versions that are not yet released but with which the current version will be compatible.
These are automatically checked out and built from source.
See link:./buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java[VersionCollection]
and link:./distribution/bwc/build.gradle[distribution/bwc/build.gradle]
for more information.
=== BWC Testing against a specific remote/branch
When running `./gradlew check`, minimal bwc checks are also run against compatible versions that are not yet released.
==== BWC Testing against a specific remote/branch
Sometimes a backward compatibility change spans two versions. A common case is new functionality
that needs a BWC bridge in an unreleased version of a release branch (for example, 5.x).
@ -597,7 +572,7 @@ will contain your change.
. Push both branches to your remote repository.
. Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.
== Skip fetching latest
==== Skip fetching latest
For some BWC testing scenarios, you want to use the local clone of the
repository without fetching latest. For these use cases, you can set the system

View File

@ -39,7 +39,7 @@ if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") {
// common maven publishing configuration
subprojects {
group = 'org.elasticsearch'
version = VersionProperties.elasticsearch.toString()
version = VersionProperties.elasticsearch
description = "Elasticsearch subproject ${project.path}"
}
@ -282,7 +282,7 @@ subprojects {
// other packages (e.g org.elasticsearch.client) will point to server rather than
// their own artifacts.
if (project.plugins.hasPlugin(BuildPlugin) || project.plugins.hasPlugin(PluginBuildPlugin)) {
String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
Closure sortClosure = { a, b -> b.group <=> a.group }
Closure depJavadocClosure = { shadowed, dep ->
if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) {

View File

@ -41,46 +41,29 @@ if (project == rootProject) {
* Propagating version.properties to the rest of the build *
*****************************************************************************/
Properties props = new Properties()
props.load(project.file('version.properties').newDataInputStream())
version = props.getProperty('elasticsearch')
boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true"));
if (snapshot) {
// we update the version property to reflect if we are building a snapshot or a release build
// we write this back out below to load it in the Build.java which will be shown in rest main action
// to indicate this being a snapshot build or a release build.
version += "-SNAPSHOT"
props.put("elasticsearch", version);
}
File tempPropertiesFile = new File(project.buildDir, "version.properties")
task writeVersionProperties {
inputs.properties(props)
outputs.file(tempPropertiesFile)
// we update the version property to reflect if we are building a snapshot or a release build
// we write this back out below to load it in the Build.java which will be shown in rest main action
// to indicate this being a snapshot build or a release build.
Properties props = VersionPropertiesLoader.loadBuildSrcVersion(project.file('version.properties'))
version = props.getProperty("elasticsearch")
processResources {
doLast {
OutputStream stream = Files.newOutputStream(tempPropertiesFile.toPath());
Writer writer = file("$destinationDir/version.properties").newWriter()
try {
props.store(stream, "UTF-8");
props.store(writer, "Generated version properties")
} finally {
stream.close();
writer.close()
}
}
}
processResources {
dependsOn writeVersionProperties
from tempPropertiesFile
}
if (JavaVersion.current() < JavaVersion.VERSION_1_10) {
throw new GradleException('At least Java 10 is required to build elasticsearch gradle tools')
}
/*****************************************************************************
* Java version *
*****************************************************************************/
if (JavaVersion.current() < JavaVersion.VERSION_1_10) {
throw new GradleException('At least Java 10 is required to build elasticsearch gradle tools')
}
// Gradle 4.10 does not support setting this to 11 yet
targetCompatibility = "10"
sourceCompatibility = "10"
@ -232,3 +215,42 @@ if (project != rootProject) {
generatePomFileForPluginMavenPublication.enabled = false
}
}
// Define this here because we need it early.
class VersionPropertiesLoader {
static Properties loadBuildSrcVersion(File input) throws IOException {
Properties props = new Properties();
InputStream is = new FileInputStream(input)
try {
props.load(is)
} finally {
is.close()
}
loadBuildSrcVersion(props, System.getProperties())
return props
}
protected static void loadBuildSrcVersion(Properties loadedProps, Properties systemProperties) {
String elasticsearch = loadedProps.getProperty("elasticsearch")
if (elasticsearch == null) {
throw new IllegalStateException("Elasticsearch version is missing from properties.")
}
if (elasticsearch.matches("[0-9]+\\.[0-9]+\\.[0-9]+") == false) {
throw new IllegalStateException(
"Expected elasticsearch version to be numbers only of the form X.Y.Z but it was: " +
elasticsearch
)
}
String qualifier = systemProperties.getProperty("build.version_qualifier", "alpha1");
if (qualifier.isEmpty() == false) {
if (qualifier.matches("(alpha|beta|rc)\\d+") == false) {
throw new IllegalStateException("Invalid qualifier: " + qualifier)
}
elasticsearch += "-" + qualifier
}
if ("true".equals(systemProperties.getProperty("build.snapshot", "true"))) {
elasticsearch += "-SNAPSHOT"
}
loadedProps.put("elasticsearch", elasticsearch)
}
}
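
For reference, a hedged sketch of what the loader above is expected to produce (the property values here are illustrative, and the protected overload is assumed to be reachable from build-script code):

Properties loaded = new Properties();
loaded.put("elasticsearch", "7.0.0");                 // bare X.Y.Z, as now required in version.properties

Properties sysProps = new Properties();
sysProps.put("build.version_qualifier", "alpha1");    // matches the default
sysProps.put("build.snapshot", "true");               // matches the default

VersionPropertiesLoader.loadBuildSrcVersion(loaded, sysProps);
// loaded.getProperty("elasticsearch") is now "7.0.0-alpha1-SNAPSHOT";
// with -Dbuild.version_qualifier= and -Dbuild.snapshot=false it would stay "7.0.0"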

View File

@ -696,18 +696,12 @@ class BuildPlugin implements Plugin<Project> {
jarTask.destinationDir = new File(project.buildDir, 'distributions')
// fixup the jar manifest
jarTask.doFirst {
final Version versionWithoutSnapshot = new Version(
VersionProperties.elasticsearch.major,
VersionProperties.elasticsearch.minor,
VersionProperties.elasticsearch.revision,
VersionProperties.elasticsearch.suffix,
false)
// this doFirst is added before the info plugin, therefore it will run
// after the doFirst added by the info plugin, and we can override attributes
jarTask.manifest.attributes(
'X-Compile-Elasticsearch-Version': versionWithoutSnapshot,
'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch.replace("-SNAPSHOT", ""),
'X-Compile-Lucene-Version': VersionProperties.lucene,
'X-Compile-Elasticsearch-Snapshot': VersionProperties.elasticsearch.isSnapshot(),
'X-Compile-Elasticsearch-Snapshot': VersionProperties.isElasticsearchSnapshot(),
'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
'Build-Java-Version': project.compilerJavaVersion)
if (jarTask.manifest.attributes.containsKey('Change') == false) {

View File

@ -42,7 +42,7 @@ public class DocsTestPlugin extends RestTestPlugin {
* to the version being built for testing but needs to resolve to
* the last released version for docs. */
'\\{version\\}':
VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''),
VersionProperties.elasticsearch.replace('-SNAPSHOT', ''),
'\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
'\\{build_flavor\\}' :
project.integTestCluster.distribution.startsWith('oss-') ? 'oss' : 'default',

View File

@ -98,7 +98,7 @@ public class PluginBuildPlugin extends BuildPlugin {
project.pluginProperties.extension.name + "-client"
)
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.version}.pom"
generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.versions.elasticsearch}.pom"
}
} else {
project.plugins.withType(MavenPublishPlugin).whenPluginAdded {

View File

@ -76,7 +76,7 @@ class PluginPropertiesTask extends Copy {
'name': extension.name,
'description': extension.description,
'version': stringSnap(extension.version),
'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch.toString()),
'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
'javaVersion': project.targetCompatibility as String,
'classname': extension.classname,
'extendedPlugins': extension.extendedPlugins.join(','),

View File

@ -22,6 +22,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.JavaVersion
import org.gradle.api.Project
import org.gradle.api.Task
@ -220,7 +221,7 @@ class PrecommitTasks {
private static Task configureLoggerUsage(Project project) {
project.configurations.create('loggerUsagePlugin')
project.dependencies.add('loggerUsagePlugin',
"org.elasticsearch.test:logger-usage:${org.elasticsearch.gradle.VersionProperties.elasticsearch}")
"org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}")
return project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) {
classpath = project.configurations.loggerUsagePlugin
javaHome = project.runtimeJavaHome

View File

@ -101,7 +101,7 @@ class ClusterFormationTasks {
// from here on everything else works the same as if it's the current version, we fetch the BWC version
// from mirrors using gradles built-in mechanism etc.
configureDistributionDependency(project, config.distribution, bwcDistro, config.bwcVersion)
configureDistributionDependency(project, config.distribution, bwcDistro, config.bwcVersion.toString())
for (Map.Entry<String, Object> entry : config.plugins.entrySet()) {
configureBwcPluginDependency(project, entry.getValue(), bwcPlugins, config.bwcVersion)
}
@ -112,9 +112,12 @@ class ClusterFormationTasks {
// we start N nodes and out of these N nodes there might be M bwc nodes.
// for each of those nodes we might have a different configuration
final Configuration distro
final Version elasticsearchVersion
final String elasticsearchVersion
if (i < config.numBwcNodes) {
elasticsearchVersion = config.bwcVersion
elasticsearchVersion = config.bwcVersion.toString()
if (project.bwcVersions.unreleased.contains(config.bwcVersion)) {
elasticsearchVersion += "-SNAPSHOT"
}
distro = bwcDistro
} else {
elasticsearchVersion = VersionProperties.elasticsearch
@ -156,8 +159,10 @@ class ClusterFormationTasks {
}
/** Adds a dependency on the given distribution */
static void configureDistributionDependency(Project project, String distro, Configuration configuration, Version elasticsearchVersion) {
if (elasticsearchVersion.before('6.3.0') && distro.startsWith('oss-')) {
static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
if (Version.fromString(elasticsearchVersion).before('6.3.0') &&
distro.startsWith('oss-')
) {
distro = distro.substring('oss-'.length())
}
String packaging = distro
@ -227,7 +232,7 @@ class ClusterFormationTasks {
setup = configureAddKeystoreFileTasks(prefix, project, setup, node)
if (node.config.plugins.isEmpty() == false) {
if (node.nodeVersion == VersionProperties.elasticsearch) {
if (node.nodeVersion == Version.fromString(VersionProperties.elasticsearch)) {
setup = configureCopyPluginsTask(taskName(prefix, node, 'copyPlugins'), project, setup, node, prefix)
} else {
setup = configureCopyBwcPluginsTask(taskName(prefix, node, 'copyBwcPlugins'), project, setup, node, prefix)
@ -591,7 +596,7 @@ class ClusterFormationTasks {
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, String pluginName, String prefix) {
final FileCollection pluginZip;
if (node.nodeVersion != VersionProperties.elasticsearch) {
if (node.nodeVersion != Version.fromString(VersionProperties.elasticsearch)) {
pluginZip = project.configurations.getByName(pluginBwcConfigurationName(prefix, pluginName))
} else {
pluginZip = project.configurations.getByName(pluginConfigurationName(prefix, pluginName))

View File

@ -112,7 +112,7 @@ class NodeInfo {
Version nodeVersion
/** Holds node configuration for part of a test cluster. */
NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, Version nodeVersion, File sharedDir) {
NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) {
this.config = config
this.nodeNum = nodeNum
this.project = project
@ -124,7 +124,7 @@ class NodeInfo {
}
baseDir = new File(project.buildDir, "cluster/${prefix} node${nodeNum}")
pidFile = new File(baseDir, 'es.pid')
this.nodeVersion = nodeVersion
this.nodeVersion = Version.fromString(nodeVersion)
homeDir = homeDir(baseDir, config.distribution, nodeVersion)
pathConf = pathConf(baseDir, config.distribution, nodeVersion)
if (config.dataDir != null) {
@ -173,11 +173,11 @@ class NodeInfo {
}
if (nodeVersion.before("6.2.0")) {
if (this.nodeVersion.before("6.2.0")) {
javaVersion = 8
} else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) {
} else if (this.nodeVersion.onOrAfter("6.2.0") && this.nodeVersion.before("6.3.0")) {
javaVersion = 9
} else if (nodeVersion.onOrAfter("6.3.0") && nodeVersion.before("6.5.0")) {
} else if (this.nodeVersion.onOrAfter("6.3.0") && this.nodeVersion.before("6.5.0")) {
javaVersion = 10
}
@ -301,7 +301,7 @@ class NodeInfo {
}
/** Returns the directory elasticsearch home is contained in for the given distribution */
static File homeDir(File baseDir, String distro, Version nodeVersion) {
static File homeDir(File baseDir, String distro, String nodeVersion) {
String path
switch (distro) {
case 'integ-test-zip':
@ -321,7 +321,7 @@ class NodeInfo {
return new File(baseDir, path)
}
static File pathConf(File baseDir, String distro, Version nodeVersion) {
static File pathConf(File baseDir, String distro, String nodeVersion) {
switch (distro) {
case 'integ-test-zip':
case 'zip':

View File

@ -12,28 +12,17 @@ public final class Version implements Comparable<Version> {
private final int minor;
private final int revision;
private final int id;
private final boolean snapshot;
/**
* Suffix on the version name.
*/
private final String suffix;
private static final Pattern pattern =
Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?");
public Version(int major, int minor, int revision) {
this(major, minor, revision, "", false);
}
public Version(int major, int minor, int revision, String suffix, boolean snapshot) {
Objects.requireNonNull(major, "major version can't be null");
Objects.requireNonNull(minor, "minor version can't be null");
Objects.requireNonNull(revision, "revision version can't be null");
this.major = major;
this.minor = minor;
this.revision = revision;
this.snapshot = snapshot;
this.suffix = suffix == null ? "" : suffix;
// currently snapshot is not taken into account
this.id = major * 10000000 + minor * 100000 + revision * 1000;
@ -58,17 +47,13 @@ public final class Version implements Comparable<Version> {
return new Version(
Integer.parseInt(matcher.group(1)),
parseSuffixNumber(matcher.group(2)),
parseSuffixNumber(matcher.group(3)),
matcher.group(4),
matcher.group(5) != null
parseSuffixNumber(matcher.group(3))
);
}
@Override
public String toString() {
final String snapshotStr = snapshot ? "-SNAPSHOT" : "";
return String.valueOf(getMajor()) + "." + String.valueOf(getMinor()) + "." + String.valueOf(getRevision()) +
(suffix == null ? "" : suffix) + snapshotStr;
return String.valueOf(getMajor()) + "." + String.valueOf(getMinor()) + "." + String.valueOf(getRevision());
}
public boolean before(Version compareTo) {
@ -103,19 +88,6 @@ public final class Version implements Comparable<Version> {
return after(fromString(compareTo));
}
public boolean onOrBeforeIncludingSuffix(Version otherVersion) {
if (id != otherVersion.getId()) {
return id < otherVersion.getId();
}
if (suffix.equals("")) {
return otherVersion.getSuffix().equals("");
}
return otherVersion.getSuffix().equals("") || suffix.compareTo(otherVersion.getSuffix()) < 0;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
@ -128,8 +100,7 @@ public final class Version implements Comparable<Version> {
@Override
public int hashCode() {
return Objects.hash(major, minor, revision, id, snapshot, suffix);
return Objects.hash(major, minor, revision, id);
}
public int getMajor() {
@ -148,14 +119,6 @@ public final class Version implements Comparable<Version> {
return id;
}
public boolean isSnapshot() {
return snapshot;
}
public String getSuffix() {
return suffix;
}
@Override
public int compareTo(Version other) {
return Integer.compare(getId(), other.getId());
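
With the suffix and snapshot fields removed, the parser still accepts qualified and -SNAPSHOT strings, but they no longer affect identity, ordering, or toString(). A small illustrative sketch (not part of the change itself):

Version released = Version.fromString("7.0.0");
Version qualified = Version.fromString("7.0.0-alpha1-SNAPSHOT");

assert released.compareTo(qualified) == 0;      // both collapse to the same numeric id
assert qualified.toString().equals("7.0.0");    // qualifier and -SNAPSHOT are dropped
assert qualified.onOrAfter("6.5.0");            // comparisons against plain version strings keep working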

View File

@ -100,7 +100,7 @@ public class VersionCollection {
}
public VersionCollection(List<String> versionLines) {
this(versionLines, VersionProperties.getElasticsearch());
this(versionLines, Version.fromString(VersionProperties.getElasticsearch()));
}
protected VersionCollection(List<String> versionLines, Version currentVersionProperty) {
@ -110,12 +110,10 @@ public class VersionCollection {
.map(match -> new Version(
Integer.parseInt(match.group(1)),
Integer.parseInt(match.group(2)),
Integer.parseInt(match.group(3)),
(match.group(4) == null ? "" : match.group(4)).replace('_', '-'),
false
Integer.parseInt(match.group(3))
))
.sorted()
.filter(version -> version.getSuffix().isEmpty() || version.equals(currentVersionProperty))
.distinct()
.collect(Collectors.groupingBy(Version::getMajor, Collectors.toList()));
if (groupByMajor.isEmpty()) {
@ -131,22 +129,11 @@ public class VersionCollection {
assertCurrentVersionMatchesParsed(currentVersionProperty);
assertNoOlderThanTwoMajors();
markUnreleasedAsSnapshot();
}
private void markUnreleasedAsSnapshot() {
getUnreleased().forEach(uv ->
groupByMajor.get(uv.getMajor()).set(
groupByMajor.get(uv.getMajor()).indexOf(uv),
new Version(uv.getMajor(), uv.getMinor(), uv.getRevision(),uv.getSuffix(), true)
)
);
}
private void assertNoOlderThanTwoMajors() {
Set<Integer> majors = groupByMajor.keySet();
if (majors.size() != 2 && currentVersion.getMinor() != 0 && currentVersion.getMajor() != 0) {
if (majors.size() != 2 && currentVersion.getMinor() != 0 && currentVersion.getRevision() != 0) {
throw new IllegalStateException(
"Expected exactly 2 majors in parsed versions but found: " + majors
);

View File

@ -10,7 +10,7 @@ import java.util.Properties;
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
*/
public class VersionProperties {
public static Version getElasticsearch() {
public static String getElasticsearch() {
return elasticsearch;
}
@ -22,12 +22,12 @@ public class VersionProperties {
return versions;
}
private static final Version elasticsearch;
private static final String elasticsearch;
private static final String lucene;
private static final Map<String, String> versions = new HashMap<String, String>();
static {
Properties props = getVersionProperties();
elasticsearch = Version.fromString(props.getProperty("elasticsearch"));
elasticsearch = props.getProperty("elasticsearch");
lucene = props.getProperty("lucene");
for (String property : props.stringPropertyNames()) {
versions.put(property, props.getProperty(property));
@ -38,13 +38,17 @@ public class VersionProperties {
Properties props = new Properties();
InputStream propsStream = VersionProperties.class.getResourceAsStream("/version.properties");
if (propsStream == null) {
throw new RuntimeException("/version.properties resource missing");
throw new IllegalStateException("/version.properties resource missing");
}
try {
props.load(propsStream);
} catch (IOException e) {
throw new RuntimeException(e);
throw new IllegalStateException("Failed to load version properties", e);
}
return props;
}
public static boolean isElasticsearchSnapshot() {
return elasticsearch.endsWith("-SNAPSHOT");
}
}
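
Because the version is now a plain String, callers branch on the new helper rather than on Version.isSnapshot(); a hedged sketch of the kind of usage this enables, mirroring the root build.gradle and BuildPlugin changes elsewhere in this merge:

String version = VersionProperties.getElasticsearch();      // e.g. "7.0.0-alpha1-SNAPSHOT"
String artifactsHost = VersionProperties.isElasticsearchSnapshot()
        ? "https://snapshots.elastic.co"
        : "https://artifacts.elastic.co";
String manifestVersion = version.replace("-SNAPSHOT", "");  // as used for X-Compile-Elasticsearch-Version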

View File

@ -101,7 +101,7 @@ public class VersionCollectionTests extends GradleUnitTestCase {
formatVersionToLine("6.5.0"),
formatVersionToLine("7.0.0")
),
Version.fromString("7.0.0")
Version.fromString("6.5.0")
);
}

View File

@ -33,11 +33,11 @@ public class VersionTests extends GradleUnitTestCase {
public ExpectedException expectedEx = ExpectedException.none();
public void testVersionParsing() {
assertVersionEquals("7.0.1", 7, 0, 1, "", false);
assertVersionEquals("7.0.1-alpha2", 7, 0, 1, "-alpha2", false);
assertVersionEquals("5.1.2-rc3", 5, 1, 2, "-rc3", false);
assertVersionEquals("6.1.2-SNAPSHOT", 6, 1, 2, "", true);
assertVersionEquals("6.1.2-beta1-SNAPSHOT", 6, 1, 2, "-beta1", true);
assertVersionEquals("7.0.1", 7, 0, 1);
assertVersionEquals("7.0.1-alpha2", 7, 0, 1);
assertVersionEquals("5.1.2-rc3", 5, 1, 2);
assertVersionEquals("6.1.2-SNAPSHOT", 6, 1, 2);
assertVersionEquals("6.1.2-beta1-SNAPSHOT", 6, 1, 2);
}
public void testCompareWithStringVersions() {
@ -74,21 +74,12 @@ public class VersionTests extends GradleUnitTestCase {
}
public void testToString() {
assertEquals("7.0.1", new Version(7, 0, 1, null, false).toString());
assertEquals("7.0.1", new Version(7, 0, 1).toString());
}
public void testCompareVersions() {
assertEquals(0, new Version(7, 0, 0, null, true).compareTo(
new Version(7, 0, 0, null, true)
));
assertEquals(0, new Version(7, 0, 0, null, true).compareTo(
new Version(7, 0, 0, "", true)
));
assertEquals(
0,
new Version(7, 0, 0, "-alpha1", false).compareTo(
new Version(7, 0, 0, "", true))
assertEquals(0,
new Version(7, 0, 0).compareTo(new Version(7, 0, 0))
);
}
@ -108,17 +99,11 @@ public class VersionTests extends GradleUnitTestCase {
assertEquals(smaller + " should be smaller than " + bigger, -1, smaller.compareTo(bigger));
}
private void assertVersionEquals(String stringVersion, int major, int minor, int revision, String sufix, boolean snapshot) {
private void assertVersionEquals(String stringVersion, int major, int minor, int revision) {
Version version = Version.fromString(stringVersion);
assertEquals(major, version.getMajor());
assertEquals(minor, version.getMinor());
assertEquals(revision, version.getRevision());
if (snapshot) {
assertTrue("Expected version to be a snapshot but it was not", version.isSnapshot());
} else {
assertFalse("Expected version not to be a snapshot but it was", version.isSnapshot());
}
assertEquals(sufix, version.getSuffix());
}
}

View File

@ -1,4 +1,4 @@
elasticsearch = 7.0.0-alpha1
elasticsearch = 7.0.0
lucene = 8.0.0-snapshot-7d0a7782fa
# optional dependencies

View File

@ -129,10 +129,10 @@ bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unre
baseDir += project == 'zip' ? '/archives' : '/packages'
// add oss variant first
projectDirs.add("${baseDir}/oss-${project}")
artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}.${project}"))
artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}-SNAPSHOT.${project}"))
}
projectDirs.add("${baseDir}/${project}")
artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}.${project}"))
artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}-SNAPSHOT.${project}"))
}
task buildBwcVersion(type: Exec) {

View File

@ -3,7 +3,9 @@
[[configuring-metricbeat]]
=== Monitoring {es} with {metricbeat}
beta[] In 6.5 and later, you can use {metricbeat} to collect data about {es}
beta[]
In 6.5 and later, you can use {metricbeat} to collect data about {es}
and ship it to the monitoring cluster, rather than routing it through exporters
as described in <<configuring-monitoring>>.

View File

@ -118,6 +118,10 @@ The above request will yield the following response:
<2> The `_percolator_document_slot` field indicates which document has matched with this query.
Useful when percolating multiple documents simultaneously.
TIP: To provide a simple example, this documentation uses one index `my-index` for both the percolate queries and documents.
This set-up can work well when there are just a few percolate queries registered. However, with heavier usage it is recommended
to store queries and documents in separate indices. Please see <<how-it-works, How it Works Under the Hood>> for more details.
[float]
==== Parameters
@ -643,6 +647,7 @@ The above search request returns a response similar to this:
query with `_name` parameter set to `query1`.
[float]
[[how-it-works]]
==== How it Works Under the Hood
When indexing a document into an index that has the <<percolator,percolator field type>> mapping configured, the query
@ -679,3 +684,11 @@ GET /_search
NOTE: The above example assumes that there is a `query` field of type
`percolator` in the mappings.
Given the design of percolation, it often makes sense to use separate indices for the percolate queries and documents
being percolated, as opposed to a single index as we do in these examples. There are a few benefits to this approach:
- Because percolate queries contain a different set of fields from the percolated documents, using two separate indices
allows for fields to be stored in a denser, more efficient way.
- Percolate queries do not scale in the same way as other queries, so percolation performance may benefit from using
a different index configuration, like the number of primary shards.

View File

@ -77,11 +77,11 @@ opening spend more time in the `opening` state. Defaults to `2`.
These settings are for advanced use cases; the default values are generally
sufficient:
`xpack.ml.max_anomaly_records`:: (<<cluster-update-settings,Dynamic>>)
`xpack.ml.max_anomaly_records` (<<cluster-update-settings,Dynamic>>)::
The maximum number of records that are output per bucket. The default value is
`500`.
`xpack.ml.max_lazy_ml_nodes`:: (<<cluster-update-settings,Dynamic>>)
`xpack.ml.max_lazy_ml_nodes` (<<cluster-update-settings,Dynamic>>)::
The number of lazily spun up Machine Learning nodes. Useful in situations
where ML nodes are not desired until the first Machine Learning Job
is opened. It defaults to `0` and has a maximum acceptable value of `3`.

View File

@ -43,14 +43,14 @@ to `true`. Its default value is `false`.
The `xpack.monitoring.collection` settings control how data is collected from
your Elasticsearch nodes.
`xpack.monitoring.collection.enabled`:: (<<cluster-update-settings,Dynamic>>)
`xpack.monitoring.collection.enabled` (<<cluster-update-settings,Dynamic>>)::
added[6.3.0] Set to `true` to enable the collection of monitoring data. When
this setting is `false` (default), {es} monitoring data is not collected and
all monitoring data from other sources such as {kib}, Beats, and Logstash is
ignored.
`xpack.monitoring.collection.interval`:: (<<cluster-update-settings,Dynamic>>)
`xpack.monitoring.collection.interval` (<<cluster-update-settings,Dynamic>>)::
Setting to `-1` to disable data collection is no longer supported beginning with
7.0.0. deprecated[6.3.0, Use `xpack.monitoring.collection.enabled` set to
@ -60,7 +60,7 @@ Controls how often data samples are collected. Defaults to `10s`. If you
modify the collection interval, set the `xpack.monitoring.min_interval_seconds`
option in `kibana.yml` to the same value.
`xpack.monitoring.elasticsearch.collection.enabled`:: (<<cluster-update-settings,Dynamic>>)
`xpack.monitoring.elasticsearch.collection.enabled` (<<cluster-update-settings,Dynamic>>)::
Controls whether statistics about your {es} cluster should be collected. Defaults to `true`.
This is different from xpack.monitoring.collection.enabled, which allows you to enable or disable
@ -72,7 +72,7 @@ to pass through this cluster.
Sets the timeout for collecting the cluster statistics. Defaults to `10s`.
`xpack.monitoring.collection.indices`:: (<<cluster-update-settings,Dynamic>>)
`xpack.monitoring.collection.indices` (<<cluster-update-settings,Dynamic>>)::
Controls which indices Monitoring collects data from. Defaults to all indices. Specify the index names
as a comma-separated list, for example `test1,test2,test3`. Names can include wildcards, for

View File

@ -11,7 +11,7 @@ setup:
settings:
bucket: ${bucket}
client: "integration_test"
base_path: ${base_path}
base_path: "${base_path}"
---
"Snapshot/Restore with repository-gcs":
@ -23,7 +23,7 @@ setup:
- match: { repository.settings.bucket : ${bucket} }
- match: { repository.settings.client : "integration_test" }
- match: { repository.settings.base_path : ${base_path} }
- match: { repository.settings.base_path : "${base_path}" }
# Index documents
- do:

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${permanent_bucket}
client: integration_test_permanent
base_path: ${permanent_base_path}
base_path: "${permanent_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_permanent.settings.bucket : ${permanent_bucket} }
- match: { repository_permanent.settings.client : "integration_test_permanent" }
- match: { repository_permanent.settings.base_path : ${permanent_base_path} }
- match: { repository_permanent.settings.base_path : "${permanent_base_path}" }
- match: { repository_permanent.settings.canned_acl : "private" }
- match: { repository_permanent.settings.storage_class : "standard" }
- is_false: repository_permanent.settings.access_key

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${temporary_bucket}
client: integration_test_temporary
base_path: ${temporary_base_path}
base_path: "${temporary_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_temporary.settings.bucket : ${temporary_bucket} }
- match: { repository_temporary.settings.client : "integration_test_temporary" }
- match: { repository_temporary.settings.base_path : ${temporary_base_path} }
- match: { repository_temporary.settings.base_path : "${temporary_base_path}" }
- match: { repository_temporary.settings.canned_acl : "private" }
- match: { repository_temporary.settings.storage_class : "standard" }
- is_false: repository_temporary.settings.access_key

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${ec2_bucket}
client: integration_test_ec2
base_path: ${ec2_base_path}
base_path: "${ec2_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_ec2.settings.bucket : ${ec2_bucket} }
- match: { repository_ec2.settings.client : "integration_test_ec2" }
- match: { repository_ec2.settings.base_path : ${ec2_base_path} }
- match: { repository_ec2.settings.base_path : "${ec2_base_path}" }
- match: { repository_ec2.settings.canned_acl : "private" }
- match: { repository_ec2.settings.storage_class : "standard" }
- is_false: repository_ec2.settings.access_key

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${ecs_bucket}
client: integration_test_ecs
base_path: ${ecs_base_path}
base_path: "${ecs_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_ecs.settings.bucket : ${ecs_bucket} }
- match: { repository_ecs.settings.client : "integration_test_ecs" }
- match: { repository_ecs.settings.base_path : ${ecs_base_path} }
- match: { repository_ecs.settings.base_path : "${ecs_base_path}" }
- match: { repository_ecs.settings.canned_acl : "private" }
- match: { repository_ecs.settings.storage_class : "standard" }
- is_false: repository_ecs.settings.access_key

View File

@ -107,6 +107,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
* search for each query. It then does some basic sanity checking of score and hits
* to make sure the profiling doesn't interfere with the hits being returned
*/
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32492")
public void testProfileMatchesRegular() throws Exception {
createIndex("test");
ensureGreen();

View File

@ -71,4 +71,5 @@ precommit.dependsOn namingConventionsMain
test.configure {
systemProperty 'tests.gradle_index_compat_versions', bwcVersions.indexCompatible.join(',')
systemProperty 'tests.gradle_wire_compat_versions', bwcVersions.wireCompatible.join(',')
systemProperty 'tests.gradle_unreleased_versions', bwcVersions.unreleased.join(',')
}

View File

@ -413,7 +413,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* Test creates the {@link Query} from the {@link QueryBuilder} under test and delegates the
* assertions being made on the result to the implementing subclass.
*/
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34188")
public void testToQuery() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
QueryShardContext context = createShardContext();

View File

@ -24,8 +24,10 @@ import org.elasticsearch.common.collect.Tuple;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toList;
@ -389,13 +391,19 @@ public class VersionUtilsTests extends ESTestCase {
private final List<String> unreleased = new ArrayList<>();
private VersionsFromProperty(String property) {
Set<String> allUnreleased = new HashSet<>(Arrays.asList(
System.getProperty("tests.gradle_unreleased_versions", "").split(",")
));
if (allUnreleased.isEmpty()) {
fail("[tests.gradle_unreleased_versions] not set or empty. Gradle should set this before running.");
}
String versions = System.getProperty(property);
assertNotNull("Couldn't find [" + property + "]. Gradle should set these before running the tests.", versions);
assertNotNull("Couldn't find [" + property + "]. Gradle should set this before running the tests.", versions);
logger.info("Looked up versions [{}={}]", property, versions);
for (String version : versions.split(",")) {
if (version.endsWith("-SNAPSHOT")) {
unreleased.add(version.replace("-SNAPSHOT", ""));
if (allUnreleased.contains(version)) {
unreleased.add(version);
} else {
released.add(version);
}

View File

@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.tasks.Task;
@ -28,6 +29,7 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
import java.io.IOException;
import java.util.Objects;
public class TransportCcrStatsAction extends TransportMasterNodeAction<CcrStatsAction.Request, CcrStatsAction.Response> {
@ -70,7 +72,12 @@ public class TransportCcrStatsAction extends TransportMasterNodeAction<CcrStatsA
@Override
protected CcrStatsAction.Response newResponse() {
return new CcrStatsAction.Response();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
protected CcrStatsAction.Response read(StreamInput in) throws IOException {
return new CcrStatsAction.Response(in);
}
@Override

View File

@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -24,6 +25,7 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
@ -48,7 +50,12 @@ public class TransportGetAutoFollowPatternAction
@Override
protected GetAutoFollowPatternAction.Response newResponse() {
return new GetAutoFollowPatternAction.Response();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
protected GetAutoFollowPatternAction.Response read(StreamInput in) throws IOException {
return new GetAutoFollowPatternAction.Response(in);
}
@Override

View File

@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.license.LicenseUtils;
@ -38,6 +39,7 @@ import org.elasticsearch.xpack.ccr.CcrSettings;
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
@ -83,7 +85,12 @@ public final class TransportPutFollowAction
@Override
protected PutFollowAction.Response newResponse() {
return new PutFollowAction.Response();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
protected PutFollowAction.Response read(StreamInput in) throws IOException {
return new PutFollowAction.Response(in);
}
@Override

View File

@ -6,17 +6,19 @@
package org.elasticsearch.xpack.ccr;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.xpack.CcrIntegTestCase;
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
@ -48,15 +50,15 @@ public class FollowerFailOverIT extends CcrIntegTestCase {
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread(() -> {
while (stopped.get() == false) {
try {
if (frequently()) {
String id = Integer.toString(frequently() ? docID.incrementAndGet() : between(0, 10)); // sometimes update
leaderClient().prepareIndex("leader-index", "doc", id).setSource("{\"f\":" + id + "}", XContentType.JSON).get();
} else {
String id = Integer.toString(between(0, docID.get()));
leaderClient().prepareDelete("leader-index", "doc", id).get();
}
} catch (NodeClosedException ignored) {
if (frequently()) {
String id = Integer.toString(frequently() ? docID.incrementAndGet() : between(0, 10)); // sometimes update
IndexResponse indexResponse = leaderClient().prepareIndex("leader-index", "doc", id)
.setSource("{\"f\":" + id + "}", XContentType.JSON).get();
logger.info("--> index id={} seq_no={}", indexResponse.getId(), indexResponse.getSeqNo());
} else {
String id = Integer.toString(between(0, docID.get()));
DeleteResponse deleteResponse = leaderClient().prepareDelete("leader-index", "doc", id).get();
logger.info("--> delete id={} seq_no={}", deleteResponse.getId(), deleteResponse.getSeqNo());
}
}
});
@ -69,6 +71,7 @@ public class FollowerFailOverIT extends CcrIntegTestCase {
follow.getFollowRequest().setMaxWriteRequestOperationCount(randomIntBetween(32, 2048));
follow.getFollowRequest().setMaxWriteRequestSize(new ByteSizeValue(randomIntBetween(1, 4096), ByteSizeUnit.KB));
follow.getFollowRequest().setMaxOutstandingWriteRequests(randomIntBetween(1, 10));
logger.info("--> follow params {}", Strings.toString(follow.getFollowRequest()));
followerClient().execute(PutFollowAction.INSTANCE, follow).get();
ensureFollowerGreen("follower-index");
atLeastDocsIndexed(followerClient(), "follower-index", between(20, 60));

View File

@ -5,7 +5,8 @@
*/
package org.elasticsearch.xpack.ccr.action;
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
@ -13,11 +14,11 @@ import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomReadExceptions;
import static org.elasticsearch.xpack.ccr.action.StatsResponsesTests.createStatsResponse;
public class AutoFollowStatsResponseTests extends AbstractStreamableTestCase<CcrStatsAction.Response> {
public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCase<CcrStatsAction.Response> {
@Override
protected CcrStatsAction.Response createBlankInstance() {
return new CcrStatsAction.Response();
protected Writeable.Reader<CcrStatsAction.Response> instanceReader() {
return CcrStatsAction.Response::new;
}
@Override

View File

@ -5,10 +5,11 @@
*/
package org.elasticsearch.xpack.ccr.action;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
@ -16,11 +17,11 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCase<GetAutoFollowPatternAction.Response> {
public class GetAutoFollowPatternResponseTests extends AbstractWireSerializingTestCase<GetAutoFollowPatternAction.Response> {
@Override
protected GetAutoFollowPatternAction.Response createBlankInstance() {
return new GetAutoFollowPatternAction.Response();
protected Writeable.Reader<GetAutoFollowPatternAction.Response> instanceReader() {
return GetAutoFollowPatternAction.Response::new;
}
@Override

View File

@ -5,14 +5,15 @@
*/
package org.elasticsearch.xpack.ccr.action;
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
public class PutFollowActionResponseTests extends AbstractStreamableTestCase<PutFollowAction.Response> {
public class PutFollowActionResponseTests extends AbstractWireSerializingTestCase<PutFollowAction.Response> {
@Override
protected PutFollowAction.Response createBlankInstance() {
return new PutFollowAction.Response();
protected Writeable.Reader<PutFollowAction.Response> instanceReader() {
return PutFollowAction.Response::new;
}
@Override

View File

@ -12,6 +12,7 @@ import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.Action;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
@ -30,7 +31,12 @@ public class CcrStatsAction extends Action<CcrStatsAction.Response> {
@Override
public Response newResponse() {
return new Response();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
public Writeable.Reader<Response> getResponseReader() {
return Response::new;
}
public static class Request extends MasterNodeRequest<Request> {
@ -55,15 +61,19 @@ public class CcrStatsAction extends Action<CcrStatsAction.Response> {
public static class Response extends ActionResponse implements ToXContentObject {
private AutoFollowStats autoFollowStats;
private FollowStatsAction.StatsResponses followStats;
private final AutoFollowStats autoFollowStats;
private final FollowStatsAction.StatsResponses followStats;
public Response(AutoFollowStats autoFollowStats, FollowStatsAction.StatsResponses followStats) {
this.autoFollowStats = Objects.requireNonNull(autoFollowStats);
this.followStats = Objects.requireNonNull(followStats);
}
public Response() {
public Response(StreamInput in) throws IOException {
super(in);
autoFollowStats = new AutoFollowStats(in);
followStats = new FollowStatsAction.StatsResponses();
followStats.readFrom(in);
}
public AutoFollowStats getAutoFollowStats() {
@ -74,14 +84,6 @@ public class CcrStatsAction extends Action<CcrStatsAction.Response> {
return followStats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
autoFollowStats = new AutoFollowStats(in);
followStats = new FollowStatsAction.StatsResponses();
followStats.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);

View File

@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
@ -31,7 +32,12 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
@Override
public Response newResponse() {
return new Response();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
public Writeable.Reader<Response> getResponseReader() {
return Response::new;
}
public static class Request extends MasterNodeReadRequest<Request> {
@ -81,21 +87,17 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
public static class Response extends ActionResponse implements ToXContentObject {
private Map<String, AutoFollowPattern> autoFollowPatterns;
private final Map<String, AutoFollowPattern> autoFollowPatterns;
public Response(Map<String, AutoFollowPattern> autoFollowPatterns) {
this.autoFollowPatterns = autoFollowPatterns;
}
public Response() {
}
public Map<String, AutoFollowPattern> getAutoFollowPatterns() {
return autoFollowPatterns;
}
@Override
public void readFrom(StreamInput in) throws IOException {
public Response(StreamInput in) throws IOException {
super.readFrom(in);
autoFollowPatterns = in.readMap(StreamInput::readString, AutoFollowPattern::new);
}

View File

@ -15,6 +15,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -49,7 +50,12 @@ public final class PutFollowAction extends Action<PutFollowAction.Response> {
@Override
public Response newResponse() {
return new Response();
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
public Writeable.Reader<Response> getResponseReader() {
return Response::new;
}
public static class Request extends AcknowledgedRequest<Request> implements IndicesRequest, ToXContentObject {
@ -211,13 +217,9 @@ public final class PutFollowAction extends Action<PutFollowAction.Response> {
public static class Response extends ActionResponse implements ToXContentObject {
private boolean followIndexCreated;
private boolean followIndexShardsAcked;
private boolean indexFollowingStarted;
public Response() {
}
private final boolean followIndexCreated;
private final boolean followIndexShardsAcked;
private final boolean indexFollowingStarted;
public Response(boolean followIndexCreated, boolean followIndexShardsAcked, boolean indexFollowingStarted) {
this.followIndexCreated = followIndexCreated;
@ -237,9 +239,8 @@ public final class PutFollowAction extends Action<PutFollowAction.Response> {
return indexFollowingStarted;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
public Response(StreamInput in) throws IOException {
super(in);
followIndexCreated = in.readBoolean();
followIndexShardsAcked = in.readBoolean();
indexFollowingStarted = in.readBoolean();
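
The same Streamable-to-Writeable move is applied to CcrStatsAction, GetAutoFollowPatternAction, and PutFollowAction above: the response becomes immutable, deserialization moves from readFrom() into a StreamInput constructor, the action exposes getResponseReader(), and newResponse() is left to throw. A minimal sketch of the resulting shape (class and field names here are hypothetical):

public static class ExampleResponse extends ActionResponse {

    private final long value;                    // hypothetical payload

    public ExampleResponse(long value) {
        this.value = value;
    }

    public ExampleResponse(StreamInput in) throws IOException {
        super(in);                               // replaces readFrom(StreamInput)
        this.value = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVLong(value);
    }
}

// and on the corresponding Action subclass:
//   newResponse()       -> throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
//   getResponseReader() -> return ExampleResponse::new;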

View File

@ -107,9 +107,9 @@ public class In extends NamedExpression implements ScriptWeaver {
@Override
public ScriptTemplate asScript() {
ScriptTemplate leftScript = asScript(value);
// remove duplicates
// fold & remove duplicates
List<Object> values = new ArrayList<>(new LinkedHashSet<>(Foldables.valuesOf(list, value.dataType())));
values.removeIf(Objects::isNull);
return new ScriptTemplate(
formatTemplate(String.format(Locale.ROOT, "{sql}.in(%s, {})", leftScript.template())),
@ -141,6 +141,6 @@ public class In extends NamedExpression implements ScriptWeaver {
In other = (In) obj;
return Objects.equals(value, other.value)
&& Objects.equals(list, other.list);
&& Objects.equals(list, other.list);
}
}

View File

@ -213,7 +213,7 @@ public class QueryTranslatorTests extends ESTestCase {
assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(" +
"InternalSqlScriptUtils.power(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1), params.v2))",
sc.script().toString());
assertEquals("[{v=int}, {v=2}, {v=[10.0, 20.0]}]", sc.script().params().toString());
assertEquals("[{v=int}, {v=2}, {v=[10.0, null, 20.0]}]", sc.script().params().toString());
}
public void testTranslateInExpression_HavingClause_Painless() {
@ -259,6 +259,6 @@ public class QueryTranslatorTests extends ESTestCase {
assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(params.a0, params.v0))",
aggFilter.scriptTemplate().toString());
assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->"));
assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10, 20, 30]}]"));
assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10, null, 20, 30]}]"));
}
}

View File

@ -142,7 +142,12 @@ subprojects {
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
dependsOn copyTestNodeKeystore
if (version.before('6.3.0')) {
mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
String depVersion = version;
if (project.bwcVersions.unreleased.contains(version)) {
depVersion += "-SNAPSHOT"
}
mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}"
}
bwcVersion = version
numBwcNodes = 2

View File

@ -30,7 +30,12 @@ for (Version version : bwcVersions.wireCompatible) {
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
if (version.before('6.3.0')) {
mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
String depVersion = version;
if (project.bwcVersions.unreleased.contains(version)) {
depVersion += "-SNAPSHOT"
}
mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}"
}
bwcVersion = version
numBwcNodes = 3

View File

@ -124,7 +124,11 @@ subprojects {
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
dependsOn copyTestNodeKeystore
if (version.before('6.3.0')) {
mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
String depVersion = version;
if (project.bwcVersions.unreleased.contains(version)) {
depVersion += "-SNAPSHOT"
}
mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}"
}
String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/elasticsearch-users'
setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'