Improve total build configuration time (#54611) (#54994)

This commit includes a number of changes to reduce overall build
configuration time. These optimizations include:

- Removing the usage of the 'nebula.info-scm' plugin. This plugin
   leverages jgit to read various pieces of VCS information. This is
   mostly overkill, and we now have our own minimal implementation for
   determining the current commit id and origin URL.
- Removing now-unneeded build dependencies such as p4java (Perforce)
   and jgit. This reduces our build classpath considerably.
- Expanding the use of lazy task creation, particularly in our
   distribution projects. The archives and packages projects create
   lots of tasks with very complex configuration. Avoiding the creation
   of these tasks at configuration time gives us a nice boost (a sketch
   of the pattern follows below).
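
As an illustration of the lazy task creation pattern (a minimal,
self-contained sketch, not an excerpt from this commit; the task names
here are hypothetical): with tasks.register the configuration closure
runs only when the task is actually needed, whereas the eager task
syntax configures it on every invocation, even for 'gradlew help'.

// build.gradle -- standalone sketch
apply plugin: 'base' // provides the 'check' lifecycle task

// Eager: created and configured during every build invocation.
task eagerZip(type: Zip) {
  archiveClassifier = 'example'
  from 'README.md'
}

// Lazy: only the name and type are recorded here; the closure runs
// only if the task ends up in the requested task graph.
def lazyZip = tasks.register('lazyZip', Zip) {
  archiveClassifier = 'example'
  from 'README.md'
}

// Wire dependencies without forcing creation: register()/named()
// return TaskProviders, and configureEach defers per-task config.
tasks.named('check').configure { it.dependsOn(lazyZip) }
tasks.withType(Zip).configureEach {
  it.destinationDirectory.set(file("${buildDir}/distributions"))
}

The effect on configuration time can be checked with Gradle's built-in
profiler, e.g. 'gradlew help --profile', whose report breaks out the
time spent configuring each project.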
Mark Vieira 2020-04-08 16:47:02 -07:00 committed by GitHub
parent ac6d1f7b24
commit dd73a14d11
21 changed files with 297 additions and 203 deletions

View File

@ -25,6 +25,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.BwcVersions
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.plugins.ide.eclipse.model.AccessRule
import org.gradle.plugins.ide.eclipse.model.SourceFolder
@ -37,10 +38,9 @@ plugins {
id 'lifecycle-base'
id 'elasticsearch.docker-support'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
id "com.diffplug.gradle.spotless" version "3.28.0" apply false
}
apply plugin: 'nebula.info-scm'
apply from: 'gradle/build-scan.gradle'
apply from: 'gradle/build-complete.gradle'
apply from: 'gradle/runtime-jdk-provision.gradle'
@ -59,7 +59,7 @@ BuildPlugin.configureRepositories(project)
String licenseCommit
if (VersionProperties.elasticsearch.toString().endsWith('-SNAPSHOT')) {
licenseCommit = scminfo.change ?: "master" // leniency for non git builds
licenseCommit = BuildParams.gitRevision ?: "master" // leniency for non git builds
} else {
licenseCommit = "v${version}"
}

View File

@ -112,10 +112,7 @@ dependencies {
compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
compile 'com.netflix.nebula:gradle-info-plugin:5.1.0'
compile 'org.eclipse.jgit:org.eclipse.jgit:5.5.0.201909110433-r'
compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
compile 'com.netflix.nebula:gradle-info-plugin:7.1.3'
compile 'org.apache.rat:apache-rat:0.11'
compile "org.elasticsearch:jna:4.5.1"
compile 'com.github.jengelman.gradle.plugins:shadow:5.1.0'

View File

@ -23,6 +23,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowExtension
import com.github.jengelman.gradle.plugins.shadow.ShadowJavaPlugin
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
import groovy.transform.CompileStatic
import nebula.plugin.info.InfoBrokerPlugin
import org.apache.commons.io.IOUtils
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin
@ -115,13 +116,8 @@ class BuildPlugin implements Plugin<Project> {
}
project.pluginManager.apply('java')
configureConfigurations(project)
configureJars(project) // jar config must be added before info broker
// these plugins add lots of info to our jars
project.pluginManager.apply('nebula.info-broker')
project.pluginManager.apply('nebula.info-basic')
project.pluginManager.apply('nebula.info-java')
project.pluginManager.apply('nebula.info-scm')
project.pluginManager.apply('nebula.info-jar')
configureJars(project)
configureJarManifest(project)
// apply global test task failure listener
project.rootProject.pluginManager.apply(TestFailureReportingPlugin)
@ -512,7 +508,7 @@ class BuildPlugin implements Plugin<Project> {
}
// ensure javadoc task is run with 'check'
project.pluginManager.withPlugin('lifecycle-base') {
project.tasks.getByName(LifecycleBasePlugin.CHECK_TASK_NAME).dependsOn(project.tasks.withType(Javadoc))
project.tasks.named(LifecycleBasePlugin.CHECK_TASK_NAME).configure { it.dependsOn(project.tasks.withType(Javadoc)) }
}
configureJavadocJar(project)
}
@ -549,19 +545,15 @@ class BuildPlugin implements Plugin<Project> {
ext.set('noticeFile', null)
project.tasks.withType(Jar).configureEach { Jar jarTask ->
// we put all our distributable files under distributions
jarTask.destinationDir = new File(project.buildDir, 'distributions')
jarTask.destinationDirectory.set(new File(project.buildDir, 'distributions'))
// fixup the jar manifest
jarTask.doFirst {
// this doFirst is added before the info plugin, therefore it will run
// after the doFirst added by the info plugin, and we can override attributes
JavaVersion compilerJavaVersion = BuildParams.compilerJavaVersion
jarTask.manifest.attributes(
'Change': BuildParams.gitRevision,
'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch,
'X-Compile-Lucene-Version': VersionProperties.lucene,
'X-Compile-Elasticsearch-Snapshot': VersionProperties.isElasticsearchSnapshot(),
'Build-Date': BuildParams.buildDate,
'Build-Java-Version': compilerJavaVersion)
'Build-Java-Version': BuildParams.compilerJavaVersion)
}
}
// add license/notice files
@ -610,9 +602,22 @@ class BuildPlugin implements Plugin<Project> {
}
}
static void configureTestTasks(Project project) {
ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension)
static void configureJarManifest(Project project) {
project.pluginManager.apply('nebula.info-broker')
project.pluginManager.apply('nebula.info-basic')
project.pluginManager.apply('nebula.info-java')
project.pluginManager.apply('nebula.info-jar')
project.plugins.withId('nebula.info-broker') { InfoBrokerPlugin manifestPlugin ->
manifestPlugin.add('Module-Origin') { BuildParams.gitOrigin }
manifestPlugin.add('Change') { BuildParams.gitRevision }
manifestPlugin.add('X-Compile-Elasticsearch-Version') { VersionProperties.elasticsearch }
manifestPlugin.add('X-Compile-Lucene-Version') { VersionProperties.lucene }
manifestPlugin.add('X-Compile-Elasticsearch-Snapshot') { VersionProperties.isElasticsearchSnapshot() }
}
}
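    // The resulting jar manifests then carry entries along these lines
    // (illustrative values only, not taken from a real build):
    //   Change: dd73a14d11
    //   Module-Origin: https://github.com/elastic/elasticsearch.git
    //   X-Compile-Elasticsearch-Version: 7.8.0
    //   X-Compile-Elasticsearch-Snapshot: true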
static void configureTestTasks(Project project) {
// Default test task should run only unit tests
maybeConfigure(project.tasks, 'test', Test) { Test task ->
task.include '**/*Tests.class'

View File

@ -39,6 +39,7 @@ import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
import org.gradle.api.publish.maven.tasks.GenerateMavenPom
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.TaskProvider
import org.gradle.api.tasks.bundling.Zip
import org.gradle.jvm.tasks.Jar
@ -101,7 +102,7 @@ class PluginBuildPlugin implements Plugin<Project> {
if (extension1.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}
Copy buildProperties = project.tasks.getByName('pluginProperties')
Map<String, String> properties = [
'name' : extension1.name,
'description' : extension1.description,
@ -113,8 +114,10 @@ class PluginBuildPlugin implements Plugin<Project> {
'hasNativeController' : extension1.hasNativeController,
'requiresKeystore' : extension1.requiresKeystore
]
buildProperties.expand(properties)
buildProperties.inputs.properties(properties)
project.tasks.named('pluginProperties').configure {
expand(properties)
inputs.properties(properties)
}
if (isModule == false || isXPackModule) {
addNoticeGeneration(project, extension1)
}
@ -199,7 +202,7 @@ class PluginBuildPlugin implements Plugin<Project> {
File templateFile = new File(project.buildDir, "templates/plugin-descriptor.properties")
// create tasks to build the properties file for this plugin
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
TaskProvider<Task> copyPluginPropertiesTemplate = project.tasks.register('copyPluginPropertiesTemplate') {
outputs.file(templateFile)
doLast {
InputStream resourceTemplate = PluginBuildPlugin.getResourceAsStream("/${templateFile.name}")
@ -207,7 +210,7 @@ class PluginBuildPlugin implements Plugin<Project> {
}
}
Copy buildProperties = project.tasks.create('pluginProperties', Copy) {
TaskProvider<Copy> buildProperties = project.tasks.register('pluginProperties', Copy) {
dependsOn(copyPluginPropertiesTemplate)
from(templateFile)
into("${project.buildDir}/generated-resources")
@ -216,11 +219,11 @@ class PluginBuildPlugin implements Plugin<Project> {
// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
testSourceSet.output.dir(buildProperties.destinationDir, builtBy: buildProperties)
testSourceSet.output.dir("${project.buildDir}/generated-resources", builtBy: buildProperties)
testSourceSet.resources.srcDir(pluginMetadata)
// create the actual bundle task, which zips up all the files for the plugin
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip) {
TaskProvider<Zip> bundle = project.tasks.register('bundlePlugin', Zip) {
from buildProperties
from pluginMetadata // metadata (eg custom security policy)
/*
@ -270,16 +273,21 @@ class PluginBuildPlugin implements Plugin<Project> {
protected static void addNoticeGeneration(Project project, PluginPropertiesExtension extension) {
File licenseFile = extension.licenseFile
if (licenseFile != null) {
project.tasks.bundlePlugin.from(licenseFile.parentFile) {
include(licenseFile.name)
rename { 'LICENSE.txt' }
project.tasks.named('bundlePlugin').configure {
from(licenseFile.parentFile) {
include(licenseFile.name)
rename { 'LICENSE.txt' }
}
}
}
File noticeFile = extension.noticeFile
if (noticeFile != null) {
NoticeTask generateNotice = project.tasks.create('generateNotice', NoticeTask.class)
generateNotice.inputFile = noticeFile
project.tasks.bundlePlugin.from(generateNotice)
TaskProvider<NoticeTask> generateNotice = project.tasks.register('generateNotice', NoticeTask) {
inputFile = noticeFile
}
project.tasks.named('bundlePlugin').configure {
from(generateNotice)
}
}
}
}

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.gradle.test
import groovy.transform.CompileStatic
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.InvalidUserDataException
@ -31,14 +32,15 @@ import org.gradle.api.plugins.JavaBasePlugin
* projects and in conjunction with {@link BuildPlugin} for testing the rest
* client.
*/
public class RestTestPlugin implements Plugin<Project> {
List REQUIRED_PLUGINS = [
@CompileStatic
class RestTestPlugin implements Plugin<Project> {
List<String> REQUIRED_PLUGINS = [
'elasticsearch.build',
'elasticsearch.standalone-rest-test']
@Override
public void apply(Project project) {
if (false == REQUIRED_PLUGINS.any {project.pluginManager.hasPlugin(it)}) {
void apply(Project project) {
if (false == REQUIRED_PLUGINS.any { project.pluginManager.hasPlugin(it) }) {
throw new InvalidUserDataException('elasticsearch.rest-test '
+ 'requires either elasticsearch.build or '
+ 'elasticsearch.standalone-rest-test')
@ -48,7 +50,7 @@ public class RestTestPlugin implements Plugin<Project> {
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.description = 'Runs rest tests against an elasticsearch cluster.'
integTest.group = JavaBasePlugin.VERIFICATION_GROUP
integTest.mustRunAfter(project.precommit)
project.check.dependsOn(integTest)
integTest.mustRunAfter(project.tasks.named('precommit'))
project.tasks.named('check').configure { it.dependsOn(integTest) }
}
}

View File

@ -76,7 +76,7 @@ class StandaloneRestTestPlugin implements Plugin<Project> {
// need to apply plugin after test source sets are created
project.pluginManager.apply(RestResourcesPlugin)
project.tasks.withType(Test) { Test test ->
project.tasks.withType(Test).configureEach { Test test ->
test.testClassesDirs = testSourceSet.output.classesDirs
test.classpath = testSourceSet.runtimeClasspath
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.TaskProvider
import org.gradle.api.tasks.testing.Test
/**
@ -37,12 +38,13 @@ class StandaloneTestPlugin implements Plugin<Project> {
void apply(Project project) {
project.pluginManager.apply(StandaloneRestTestPlugin)
Test test = project.tasks.create('test', Test)
test.group = JavaBasePlugin.VERIFICATION_GROUP
test.description = 'Runs unit tests that are separate'
project.tasks.register('test', Test).configure { t ->
t.group = JavaBasePlugin.VERIFICATION_GROUP
t.description = 'Runs unit tests that are separate'
t.mustRunAfter(project.tasks.getByName('precommit'))
}
BuildPlugin.configureCompile(project)
test.mustRunAfter(project.tasks.getByName('precommit'))
project.tasks.getByName('check').dependsOn(test)
project.tasks.named('check').configure { it.dependsOn(project.tasks.named('test')) }
}
}

View File

@ -32,6 +32,7 @@ import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@ -68,6 +69,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
File compilerJavaHome = findCompilerJavaHome();
File runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome);
GitInfo gitInfo = gitInfo(project.getRootProject().getRootDir());
// Initialize global build parameters
BuildParams.init(params -> {
params.reset();
@ -80,7 +83,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
params.setMinimumCompilerVersion(minimumCompilerVersion);
params.setMinimumRuntimeVersion(minimumRuntimeVersion);
params.setGradleJavaVersion(Jvm.current().getJavaVersion());
params.setGitRevision(gitRevision(project.getRootProject().getRootDir()));
params.setGitRevision(gitInfo.getRevision());
params.setGitOrigin(gitInfo.getOrigin());
params.setBuildDate(ZonedDateTime.now(ZoneOffset.UTC));
params.setTestSeed(getTestSeed());
params.setIsCi(System.getenv("JENKINS_URL") != null);
@ -299,7 +303,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
return _defaultParallel;
}
public static String gitRevision(File rootDir) {
public static GitInfo gitInfo(File rootDir) {
try {
/*
* We want to avoid forking another process to run git rev-parse HEAD. Instead, we will read the refs manually. The
@ -320,7 +324,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
final Path dotGit = rootDir.toPath().resolve(".git");
final String revision;
if (Files.exists(dotGit) == false) {
return "unknown";
return new GitInfo("unknown", "unknown");
}
final Path head;
final Path gitDir;
@ -332,7 +336,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
// this is a git worktree, follow the pointer to the repository
final Path workTree = Paths.get(readFirstLine(dotGit).substring("gitdir:".length()).trim());
if (Files.exists(workTree) == false) {
return "unknown";
return new GitInfo("unknown", "unknown");
}
head = workTree.resolve("HEAD");
final Path commonDir = Paths.get(readFirstLine(workTree.resolve("commondir")));
@ -366,13 +370,44 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
// we are in detached HEAD state
revision = ref;
}
return revision;
return new GitInfo(revision, findOriginUrl(gitDir.resolve("config")));
} catch (final IOException e) {
// for now, do not be lenient until we have better understanding of real-world scenarios where this happens
throw new GradleException("unable to read the git revision", e);
}
}
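    // For reference, a representative sketch (not taken from this
    // repository) of the config section that findOriginUrl scans for:
    //
    //   [remote "origin"]
    //       url = git@github.com:example/example.git
    //       fetch = +refs/heads/*:refs/remotes/origin/*
    //
    // Key/value pairs are collected until the next "[section]" header;
    // the "url" value is returned, or "unknown" if none is found.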
private static String findOriginUrl(final Path configFile) throws IOException {
Map<String, String> props = new HashMap<>();
try (Stream<String> stream = Files.lines(configFile, StandardCharsets.UTF_8)) {
Iterator<String> lines = stream.iterator();
boolean foundOrigin = false;
while (lines.hasNext()) {
String line = lines.next().trim();
if (line.startsWith(";") || line.startsWith("#")) {
// ignore comments
continue;
}
if (foundOrigin) {
if (line.startsWith("[")) {
// we're on to the next config item so stop looking
break;
}
String[] pair = line.trim().split("=");
props.put(pair[0].trim(), pair[1].trim());
} else {
if (line.equals("[remote \"origin\"]")) {
foundOrigin = true;
}
}
}
}
String originUrl = props.get("url");
return originUrl == null ? "unknown" : originUrl;
}
private static String readFirstLine(final Path path) throws IOException {
String firstLine;
try (Stream<String> lines = Files.lines(path, StandardCharsets.UTF_8)) {
@ -380,4 +415,22 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
}
return firstLine;
}
private static class GitInfo {
private final String revision;
private final String origin;
GitInfo(String revision, String origin) {
this.revision = revision;
this.origin = origin;
}
public String getRevision() {
return revision;
}
public String getOrigin() {
return origin;
}
}
}

View File

@ -23,6 +23,7 @@ public class BuildParams {
private static JavaVersion runtimeJavaVersion;
private static Boolean inFipsJvm;
private static String gitRevision;
private static String gitOrigin;
private static ZonedDateTime buildDate;
private static String testSeed;
private static Boolean isCi;
@ -86,6 +87,10 @@ public class BuildParams {
return value(gitRevision);
}
public static String getGitOrigin() {
return value(gitOrigin);
}
public static ZonedDateTime getBuildDate() {
return value(buildDate);
}
@ -195,6 +200,10 @@ public class BuildParams {
BuildParams.gitRevision = requireNonNull(gitRevision);
}
public void setGitOrigin(String gitOrigin) {
BuildParams.gitOrigin = requireNonNull(gitOrigin);
}
public void setBuildDate(ZonedDateTime buildDate) {
BuildParams.buildDate = requireNonNull(buildDate);
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.gradle.EmptyDirTask
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar
@ -37,17 +38,17 @@ apply plugin: 'base'
// create the directory that we want, and then point CopySpec to its
// parent to copy to the root of the distribution
ext.logsDir = new File(buildDir, 'logs-hack/logs')
task createLogsDir(type: EmptyDirTask) {
tasks.register('createLogsDir', EmptyDirTask) {
dir = "${logsDir}"
dirMode = 0755
}
ext.pluginsDir = new File(buildDir, 'plugins-hack/plugins')
task createPluginsDir(type: EmptyDirTask) {
tasks.register('createPluginsDir', EmptyDirTask) {
dir = "${pluginsDir}"
dirMode = 0755
}
ext.jvmOptionsDir = new File(buildDir, 'jvm-options-hack/jvm.options.d')
task createJvmOptionsDir(type: EmptyDirTask) {
tasks.register('createJvmOptionsDir', EmptyDirTask) {
dir = "${jvmOptionsDir}"
dirMode = 0750
}
@ -104,7 +105,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
}
// common config across all zip/tar
tasks.withType(AbstractArchiveTask) {
tasks.withType(AbstractArchiveTask).configureEach {
dependsOn createLogsDir, createPluginsDir, createJvmOptionsDir
String subdir = it.name.substring('build'.size()).replaceAll(/[A-Z]/) { '-' + it.toLowerCase() }.substring(1)
destinationDir = file("${subdir}/build/distributions")
@ -116,30 +117,30 @@ Closure commonZipConfig = {
fileMode 0644
}
task buildIntegTestZip(type: Zip) {
tasks.register('buildIntegTestZip', Zip) {
configure(commonZipConfig)
with archiveFiles(transportModulesFiles, 'zip', null, 'x64', true, false)
}
task buildWindowsZip(type: Zip) {
tasks.register('buildWindowsZip', Zip) {
configure(commonZipConfig)
archiveClassifier = 'windows-x86_64'
with archiveFiles(modulesFiles(false, 'windows'), 'zip', 'windows', 'x64', false, true)
}
task buildOssWindowsZip(type: Zip) {
tasks.register('buildOssWindowsZip', Zip) {
configure(commonZipConfig)
archiveClassifier = 'windows-x86_64'
with archiveFiles(modulesFiles(true, 'windows'), 'zip', 'windows', 'x64', true, true)
}
task buildNoJdkWindowsZip(type: Zip) {
tasks.register('buildNoJdkWindowsZip', Zip) {
configure(commonZipConfig)
archiveClassifier = 'no-jdk-windows-x86_64'
with archiveFiles(modulesFiles(false, 'windows'), 'zip', 'windows', 'x64', false, false)
}
task buildOssNoJdkWindowsZip(type: Zip) {
tasks.register('buildOssNoJdkWindowsZip', Zip) {
configure(commonZipConfig)
archiveClassifier = 'no-jdk-windows-x86_64'
with archiveFiles(modulesFiles(true, 'windows'), 'zip', 'windows', 'x64', true, false)
@ -152,61 +153,61 @@ Closure commonTarConfig = {
fileMode 0644
}
task buildDarwinTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildDarwinTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'darwin-x86_64'
with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', 'x64', false, true)
}
task buildOssDarwinTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildOssDarwinTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'darwin-x86_64'
with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', 'x64', true, true)
}
task buildNoJdkDarwinTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildNoJdkDarwinTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-darwin-x86_64'
with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', 'x64', false, false)
}
task buildOssNoJdkDarwinTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildOssNoJdkDarwinTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-darwin-x86_64'
with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', 'x64', true, false)
}
task buildLinuxAarch64Tar(type: SymbolicLinkPreservingTar) {
tasks.register('buildLinuxAarch64Tar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'linux-aarch64'
with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', 'aarch64', false, true)
}
task buildLinuxTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildLinuxTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'linux-x86_64'
with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', 'x64', false, true)
}
task buildOssLinuxAarch64Tar(type: SymbolicLinkPreservingTar) {
tasks.register('buildOssLinuxAarch64Tar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'linux-aarch64'
with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', 'aarch64', true, true)
}
task buildOssLinuxTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildOssLinuxTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'linux-x86_64'
with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', 'x64', true, true)
}
task buildNoJdkLinuxTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildNoJdkLinuxTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-linux-x86_64'
with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', 'x64', false, false)
}
task buildOssNoJdkLinuxTar(type: SymbolicLinkPreservingTar) {
tasks.register('buildOssNoJdkLinuxTar', SymbolicLinkPreservingTar) {
configure(commonTarConfig)
archiveClassifier = 'no-jdk-linux-x86_64'
with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', 'x64', true, false)
@ -225,7 +226,7 @@ subprojects {
apply plugin: 'distribution'
String buildTask = "build${it.name.replaceAll(/-[a-z]/) { it.substring(1).toUpperCase() }.capitalize()}"
ext.buildDist = parent.tasks.getByName(buildTask)
ext.buildDist = parent.tasks.named(buildTask)
artifacts {
'default' buildDist
}
@ -238,24 +239,24 @@ subprojects {
assert project.name.contains('zip')
archiveExtractionDir = new File(buildDir, 'zip-extracted')
}
task checkExtraction(type: LoggedExec) {
def checkExtraction = tasks.register('checkExtraction', LoggedExec) {
dependsOn buildDist
doFirst {
project.delete(archiveExtractionDir)
archiveExtractionDir.mkdirs()
}
}
check.dependsOn checkExtraction
tasks.named('check').configure { dependsOn checkExtraction }
if (project.name.contains('tar')) {
checkExtraction {
checkExtraction.configure {
onlyIf tarExists
commandLine 'tar', '-xvzf', "${-> buildDist.outputs.files.singleFile}", '-C', archiveExtractionDir
commandLine 'tar', '-xvzf', "${-> buildDist.get().outputs.files.singleFile}", '-C', archiveExtractionDir
}
} else {
assert project.name.contains('zip')
checkExtraction {
checkExtraction.configure {
onlyIf unzipExists
commandLine 'unzip', "${-> buildDist.outputs.files.singleFile}", '-d', archiveExtractionDir
commandLine 'unzip', "${-> buildDist.get().outputs.files.singleFile}", '-d', archiveExtractionDir
}
}
@ -268,7 +269,7 @@ subprojects {
}
task checkLicense {
tasks.register('checkLicense') {
dependsOn buildDist, checkExtraction
onlyIf toolExists
doLast {
@ -283,9 +284,9 @@ subprojects {
assertLinesInFile(licensePath, licenseLines)
}
}
check.dependsOn checkLicense
tasks.named('check').configure { dependsOn checkLicense }
task checkNotice {
tasks.register('checkNotice') {
dependsOn buildDist, checkExtraction
onlyIf toolExists
doLast {
@ -294,12 +295,12 @@ subprojects {
assertLinesInFile(noticePath, noticeLines)
}
}
check.dependsOn checkNotice
tasks.named('check').configure { dependsOn checkNotice }
if (project.name == 'zip' || project.name == 'tar') {
project.ext.licenseName = 'Elastic License'
project.ext.licenseUrl = ext.elasticLicenseUrl
task checkMlCppNotice {
def checkMlCppNotice = tasks.register('checkMlCppNotice') {
dependsOn buildDist, checkExtraction
onlyIf toolExists
doLast {
@ -314,7 +315,7 @@ subprojects {
}
}
}
check.dependsOn checkMlCppNotice
tasks.named('check').configure { dependsOn checkMlCppNotice }
}
}
@ -355,7 +356,6 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
// The integ-test-distribution is published to maven
BuildPlugin.configurePomGeneration(project)
apply plugin: 'nebula.info-scm'
apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
@ -366,7 +366,7 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
publications {
nebula {
artifactId archivesBaseName
artifact buildDist
artifact(buildDist.flatMap { it.archiveFile })
}
/*
* HUGE HACK: the underlying maven publication library refuses to
@ -391,9 +391,9 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
Node root = xml.asNode()
root.appendNode('name', 'Elasticsearch')
root.appendNode('description', 'A Distributed RESTful Search Engine')
root.appendNode('url', PluginBuildPlugin.urlFromOrigin(project.scminfo.origin))
root.appendNode('url', PluginBuildPlugin.urlFromOrigin(BuildParams.gitOrigin))
Node scmNode = root.appendNode('scm')
scmNode.appendNode('url', project.scminfo.origin)
scmNode.appendNode('url', BuildParams.gitOrigin)
}
}
}

View File

@ -246,6 +246,20 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
apply plugin: 'elasticsearch.jdk-download'
// Setup all required JDKs
project.jdks {
['darwin', 'windows', 'linux'].each { platform ->
(platform == 'linux' ? ['x64', 'aarch64'] : ['x64']).each { architecture ->
"bundled_${platform}_${architecture}" {
it.platform = platform
it.version = VersionProperties.getBundledJdk(platform)
it.vendor = VersionProperties.bundledJdkVendor
it.architecture = architecture
}
}
}
}
// TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run...
/*****************************************************************************
* Properties to expand when copying packaging files *
@ -392,14 +406,6 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
jdkFiles = { Project project, String platform, String architecture ->
project.jdks {
"bundled_${platform}_${architecture}" {
it.platform = platform
it.version = VersionProperties.getBundledJdk(platform)
it.vendor = VersionProperties.bundledJdkVendor
it.architecture = architecture
}
}
return copySpec {
from project.jdks."bundled_${platform}_${architecture}"
exclude "demo/**"
@ -414,7 +420,6 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
}
}
}
testClusters {

View File

@ -149,7 +149,7 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
spec.workingDir = checkoutDir
spec.commandLine "git", "checkout", refspec
}
String checkoutHash = GlobalBuildInfoPlugin.gitRevision(checkoutDir)
String checkoutHash = GlobalBuildInfoPlugin.gitInfo(checkoutDir).revision
logger.lifecycle("Checkout hash for ${project.path} is ${checkoutHash}")
file("${project.buildDir}/refspec").text = checkoutHash
}

View File

@ -351,27 +351,27 @@ Closure commonDebConfig(boolean oss, boolean jdk, String architecture) {
}
}
task buildAarch64Deb(type: Deb) {
tasks.register('buildAarch64Deb', Deb) {
configure(commonDebConfig(false, true, 'aarch64'))
}
task buildDeb(type: Deb) {
tasks.register('buildDeb', Deb) {
configure(commonDebConfig(false, true, 'x64'))
}
task buildAarch64OssDeb(type: Deb) {
tasks.register('buildAarch64OssDeb', Deb) {
configure(commonDebConfig(true, true, 'aarch64'))
}
task buildOssDeb(type: Deb) {
tasks.register('buildOssDeb', Deb) {
configure(commonDebConfig(true, true, 'x64'))
}
task buildNoJdkDeb(type: Deb) {
tasks.register('buildNoJdkDeb', Deb) {
configure(commonDebConfig(false, false, 'x64'))
}
task buildOssNoJdkDeb(type: Deb) {
tasks.register('buildOssNoJdkDeb', Deb) {
configure(commonDebConfig(true, false, 'x64'))
}
@ -404,27 +404,27 @@ Closure commonRpmConfig(boolean oss, boolean jdk, String architecture) {
}
}
task buildAarch64Rpm(type: Rpm) {
tasks.register('buildAarch64Rpm', Rpm) {
configure(commonRpmConfig(false, true, 'aarch64'))
}
task buildRpm(type: Rpm) {
tasks.register('buildRpm', Rpm) {
configure(commonRpmConfig(false, true, 'x64'))
}
task buildAarch64OssRpm(type: Rpm) {
tasks.register('buildAarch64OssRpm', Rpm) {
configure(commonRpmConfig(true, true, 'aarch64'))
}
task buildOssRpm(type: Rpm) {
tasks.register('buildOssRpm', Rpm) {
configure(commonRpmConfig(true, true, 'x64'))
}
task buildNoJdkRpm(type: Rpm) {
tasks.register('buildNoJdkRpm', Rpm) {
configure(commonRpmConfig(false, false, 'x64'))
}
task buildOssNoJdkRpm(type: Rpm) {
tasks.register('buildOssNoJdkRpm', Rpm) {
configure(commonRpmConfig(true, false, 'x64'))
}
@ -440,7 +440,7 @@ subprojects {
apply plugin: 'distribution'
String buildTask = "build${it.name.replaceAll(/-[a-z]/) { it.substring(1).toUpperCase() }.capitalize()}"
ext.buildDist = parent.tasks.getByName(buildTask)
ext.buildDist = parent.tasks.named(buildTask)
artifacts {
'default' buildDist
}
@ -456,7 +456,7 @@ subprojects {
assert project.name.contains('rpm')
packageExtractionDir = new File(extractionDir, 'rpm-extracted')
}
task checkExtraction(type: LoggedExec) {
tasks.register('checkExtraction', LoggedExec) {
dependsOn buildDist
doFirst {
project.delete(extractionDir)
@ -468,7 +468,7 @@ subprojects {
if (project.name.contains('deb')) {
checkExtraction {
onlyIf dpkgExists
commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir
commandLine 'dpkg-deb', '-x', "${-> buildDist.get().outputs.files.filter(debFilter).singleFile}", packageExtractionDir
}
} else {
assert project.name.contains('rpm')
@ -487,7 +487,7 @@ subprojects {
'--relocate',
"/=${packageExtractionDir}",
'-i',
"${-> buildDist.outputs.files.singleFile}"
"${-> buildDist.get().outputs.files.singleFile}"
}
}
@ -550,7 +550,7 @@ subprojects {
}
check.dependsOn checkNotice
task checkLicenseMetadata(type: LoggedExec) {
tasks.register('checkLicenseMetadata', LoggedExec) {
dependsOn buildDist, checkExtraction
}
check.dependsOn checkLicenseMetadata
@ -558,7 +558,7 @@ subprojects {
checkLicenseMetadata { LoggedExec exec ->
onlyIf dpkgExists
final ByteArrayOutputStream output = new ByteArrayOutputStream()
exec.commandLine 'dpkg-deb', '--info', "${-> buildDist.outputs.files.filter(debFilter).singleFile}"
exec.commandLine 'dpkg-deb', '--info', "${-> buildDist.get().outputs.files.filter(debFilter).singleFile}"
exec.standardOutput = output
doLast {
String expectedLicense
@ -594,7 +594,7 @@ subprojects {
checkLicenseMetadata { LoggedExec exec ->
onlyIf rpmExists
final ByteArrayOutputStream output = new ByteArrayOutputStream()
exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}"
exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.get().outputs.files.singleFile}"
exec.standardOutput = output
doLast {
String license = output.toString('UTF-8')
@ -605,7 +605,7 @@ subprojects {
expectedLicense = "Elastic License"
}
if (license != expectedLicense) {
throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]")
throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.get().outputs.files.singleFile}] but was [${license}]")
}
}
}

View File

@ -90,7 +90,7 @@ project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each {
return
}
// Do not install ingest-attachment in a FIPS 140 JVM as this is not supported
if (subproj.path.startsWith(':plugins:ingest-attachment') && Boolean.parseBoolean(System.getProperty("tests.fips.enabled"))) {
if (subproj.path.startsWith(':plugins:ingest-attachment') && BuildParams.inFipsJvm) {
return
}
// FIXME

View File

@ -12,8 +12,6 @@ buildscript {
}
}
apply plugin: org.jetbrains.gradle.ext.IdeaExtPlugin
allprojects {
apply plugin: 'idea'
@ -22,63 +20,67 @@ allprojects {
}
}
tasks.register('configureIdeaGradleJvm') {
group = 'ide'
description = 'Configures the appropriate JVM for Gradle'
// Applying this stuff, particularly the idea-ext plugin, has a cost so avoid it unless we're running in the IDE
if (System.getProperty('idea.active') == 'true') {
apply plugin: org.jetbrains.gradle.ext.IdeaExtPlugin
doLast {
modifyXml('.idea/gradle.xml') { xml ->
def gradleSettings = xml.component.find { it.'@name' == 'GradleSettings' }.option[0].GradleProjectSettings
// Remove configured JVM option to force IntelliJ to use the project JDK for Gradle
gradleSettings.option.findAll { it.'@name' == 'gradleJvm' }.each { it.parent().remove(it) }
tasks.register('configureIdeaGradleJvm') {
group = 'ide'
description = 'Configures the appropriate JVM for Gradle'
doLast {
modifyXml('.idea/gradle.xml') { xml ->
def gradleSettings = xml.component.find { it.'@name' == 'GradleSettings' }.option[0].GradleProjectSettings
// Remove configured JVM option to force IntelliJ to use the project JDK for Gradle
gradleSettings.option.findAll { it.'@name' == 'gradleJvm' }.each { it.parent().remove(it) }
}
}
}
}
idea {
project {
vcs = 'Git'
jdkName = '13'
idea {
project {
vcs = 'Git'
jdkName = '13'
settings {
delegateActions {
delegateBuildRunToGradle = false
testRunner = 'choose_per_test'
}
taskTriggers {
afterSync tasks.named('configureIdeaGradleJvm')
}
codeStyle {
java {
classCountToUseImportOnDemand = 999
settings {
delegateActions {
delegateBuildRunToGradle = false
testRunner = 'choose_per_test'
}
}
encodings {
encoding = 'UTF-8'
}
compiler {
parallelCompilation = true
javac {
generateDeprecationWarnings = false
taskTriggers {
afterSync tasks.named('configureIdeaGradleJvm')
}
}
runConfigurations {
'Debug Elasticsearch'(Remote) {
mode = 'listen'
host = 'localhost'
port = 5005
codeStyle {
java {
classCountToUseImportOnDemand = 999
}
}
defaults(JUnit) {
vmParameters = '-ea -Djava.locale.providers=SPI,COMPAT'
encodings {
encoding = 'UTF-8'
}
}
copyright {
useDefault = 'Apache'
scopes = ['x-pack': 'Elastic']
profiles {
Apache {
keyword = 'Licensed to Elasticsearch under one or more contributor'
notice = '''\
compiler {
parallelCompilation = true
javac {
generateDeprecationWarnings = false
}
}
runConfigurations {
'Debug Elasticsearch'(Remote) {
mode = 'listen'
host = 'localhost'
port = 5005
}
defaults(JUnit) {
vmParameters = '-ea -Djava.locale.providers=SPI,COMPAT'
}
}
copyright {
useDefault = 'Apache'
scopes = ['x-pack': 'Elastic']
profiles {
Apache {
keyword = 'Licensed to Elasticsearch under one or more contributor'
notice = '''\
Licensed to Elasticsearch under one or more contributor
license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright
@ -95,13 +97,14 @@ idea {
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.'''.stripIndent()
}
Elastic {
keyword = 'Licensed under the Elastic License'
notice = '''\
}
Elastic {
keyword = 'Licensed under the Elastic License'
notice = '''\
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
or more contributor license agreements. Licensed under the Elastic License;
you may not use this file except in compliance with the Elastic License.'''.stripIndent()
}
}
}
}

View File

@ -18,11 +18,14 @@
*/
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.OS
import org.elasticsearch.gradle.info.BuildParams
import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
apply plugin: 'elasticsearch.test-with-dependencies'
apply plugin: 'elasticsearch.jdk-download'
esplugin {
description 'The Reindex module adds APIs to reindex from one index to another or update documents in place.'
@ -95,6 +98,15 @@ dependencies {
es090 'org.elasticsearch:elasticsearch:0.90.13@zip'
}
jdks {
legacy {
vendor = 'adoptopenjdk'
version = '8u242+b08'
platform = OS.current().name().toLowerCase()
architecture = Architecture.current().name().toLowerCase()
}
}
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
logger.warn("Disabling reindex-from-old tests because we can't get the pid file on windows")
integTest.runner {
@ -127,11 +139,11 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
}
Task fixture = task("oldEs${version}Fixture", type: org.elasticsearch.gradle.test.AntFixture) {
dependsOn project.configurations.oldesFixture
dependsOn project.configurations.oldesFixture, jdks.legacy
dependsOn unzip
executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}"
env 'JAVA_HOME', getJavaHome(it, 8)
env 'JAVA_HOME', jdks.legacy.javaHomePath
args 'oldes.OldElasticsearch',
baseDir,
unzip.temporaryDir,

View File

@ -60,8 +60,9 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
systemProperty 'tests.is_old_cluster', 'false'
}
String oldVersion = bwcVersion.toString().minus("-SNAPSHOT")
tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT")
it.systemProperty 'tests.old_cluster_version', oldVersion
it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")

View File

@ -72,7 +72,6 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
project.delete("${buildDir}/cluster/shared/repo/${baseName}")
}
systemProperty 'tests.is_old_cluster', 'true'
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class'
exclude 'org/elasticsearch/upgrades/FullClusterRestartSettingsUpgradeIT.class'
exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class'
@ -87,14 +86,14 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
testClusters."${baseName}".goToNextVersion()
}
systemProperty 'tests.is_old_cluster', 'false'
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
exclude 'org/elasticsearch/upgrades/FullClusterRestartIT.class'
exclude 'org/elasticsearch/upgrades/FullClusterRestartSettingsUpgradeIT.class'
exclude 'org/elasticsearch/upgrades/QueryBuilderBWCIT.class'
}
String oldVersion = bwcVersion.toString().minus("-SNAPSHOT")
tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT")
it.systemProperty 'tests.old_cluster_version', oldVersion
it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")

View File

@ -36,7 +36,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
String oldVersion = bwcVersion.toString().replace('-SNAPSHOT', '')
tasks.register("${baseName}#oneThirdUpgradedTest", RestTestRunnerTask) {
dependsOn "${baseName}#oldClusterTest"
useCluster testClusters."${baseName}"
@ -47,7 +47,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.first_round', 'true'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
}
tasks.register("${baseName}#twoThirdsUpgradedTest", RestTestRunnerTask) {
@ -60,7 +60,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.first_round', 'false'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
}
tasks.register("${baseName}#upgradedClusterTest", RestTestRunnerTask) {
@ -72,7 +72,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
systemProperty 'tests.rest.suite', 'upgraded_cluster'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
}
tasks.register(bwcTaskName(bwcVersion)) {

View File

@ -32,7 +32,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
setting 'xpack.license.self_generated.type', 'trial'
}
tasks.withType(RestTestRunnerTask).matching { it.name.startsWith(baseName) }.configureEach {
tasks.withType(RestTestRunnerTask).matching { it.name.startsWith(baseName) }.all {
useCluster testClusters."${baseName}-leader"
useCluster testClusters."${baseName}-follower"
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
@ -54,27 +54,27 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
for (kind in ["follower", "leader"]) {
// Attention!! Groovy trap: do not pass `kind` to a closure
tasks.register("${baseName}#${kind}#clusterTest", RestTestRunnerTask) {
tasks.create("${baseName}#${kind}#clusterTest", RestTestRunnerTask) {
systemProperty 'tests.rest.upgrade_state', 'none'
systemProperty 'tests.rest.cluster_name', kind
ext.kindExt = kind
}
tasks.register("${baseName}#${kind}#oneThirdUpgradedTest", RestTestRunnerTask) {
tasks.create("${baseName}#${kind}#oneThirdUpgradedTest", RestTestRunnerTask) {
systemProperty 'tests.rest.upgrade_state', 'one_third'
systemProperty 'tests.rest.cluster_name', kind
dependsOn "${baseName}#leader#clusterTest", "${baseName}#follower#clusterTest"
ext.kindExt = kind
}
tasks.register("${baseName}#${kind}#twoThirdsUpgradedTest", RestTestRunnerTask) {
tasks.create("${baseName}#${kind}#twoThirdsUpgradedTest", RestTestRunnerTask) {
systemProperty 'tests.rest.upgrade_state', 'two_third'
systemProperty 'tests.rest.cluster_name', kind
dependsOn "${baseName}#${kind}#oneThirdUpgradedTest"
ext.kindExt = kind
}
tasks.register("${baseName}#${kind}#upgradedClusterTest", RestTestRunnerTask) {
tasks.create("${baseName}#${kind}#upgradedClusterTest", RestTestRunnerTask) {
systemProperty 'tests.rest.upgrade_state', 'all'
systemProperty 'tests.rest.cluster_name', kind
dependsOn "${baseName}#${kind}#twoThirdsUpgradedTest"

View File

@ -49,7 +49,6 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.security.transport.ssl.key', 'testnode.pem'
setting 'xpack.security.transport.ssl.certificate', 'testnode.crt'
keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode'
if (bwcVersion.onOrAfter('7.0.0')) {
@ -82,26 +81,26 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
if (bwcVersion.before('5.6.9') || (bwcVersion.onOrAfter('6.0.0') && bwcVersion.before('6.2.4'))) {
jvmArgs '-da:org.elasticsearch.xpack.monitoring.exporter.http.HttpExportBulk'
}
setting 'logger.org.elasticsearch.xpack.watcher', 'DEBUG'
}
}
String oldVersion = bwcVersion.toString().replace('-SNAPSHOT', '')
tasks.register("${baseName}#oldClusterTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}"
mustRunAfter(precommit)
dependsOn copyTestNodeKeyMaterial
systemProperty 'tests.rest.suite', 'old_cluster'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
def toBlackList = []
// Dataframe transforms were not added until 7.2.0
if (bwcVersion.before('7.2.0')) {
if (Version.fromString(oldVersion).before('7.2.0')) {
toBlackList << 'old_cluster/80_transform_jobs_crud/Test put batch transform on old cluster'
}
// continuous Dataframe transforms were not added until 7.3.0
if (bwcVersion.before('7.3.0')) {
if (Version.fromString(oldVersion).before('7.3.0')) {
toBlackList << 'old_cluster/80_transform_jobs_crud/Test put continuous transform on old cluster'
}
if (!toBlackList.empty) {
@ -119,7 +118,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.first_round', 'true'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
// We only need to run these tests once so we may as well do it when we're two thirds upgraded
def toBlackList = [
'mixed_cluster/10_basic/Start scroll in mixed cluster on upgraded node that we will continue after upgrade',
@ -130,7 +129,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
'mixed_cluster/80_transform_jobs_crud/Test put continuous transform on mixed cluster',
]
// transform in mixed cluster is effectively disabled till 7.4, see gh#48019
if (bwcVersion.before('7.4.0')) {
if (Version.fromString(oldVersion).before('7.4.0')) {
toBlackList.addAll([
'mixed_cluster/80_transform_jobs_crud/Test GET, start, and stop old cluster batch transforms',
'mixed_cluster/80_transform_jobs_crud/Test GET, stop, start, old continuous transforms'
@ -151,7 +150,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
systemProperty 'tests.first_round', 'false'
def toBlackList = []
// transform in mixed cluster is effectively disabled till 7.4, see gh#48019
if (bwcVersion.before('7.4.0')) {
if (Version.fromString(oldVersion).before('7.4.0')) {
toBlackList.addAll([
'mixed_cluster/80_transform_jobs_crud/Test put batch transform on mixed cluster',
'mixed_cluster/80_transform_jobs_crud/Test GET, start, and stop old cluster batch transforms',
@ -162,7 +161,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
if (!toBlackList.empty) {
systemProperty 'tests.rest.blacklist', toBlackList.join(',')
}
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
}
tasks.register("${baseName}#upgradedClusterTest", RestTestRunnerTask) {
@ -171,22 +170,21 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
doFirst {
testClusters."${baseName}".nextNodeToNextVersion()
}
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
systemProperty 'tests.rest.suite', 'upgraded_cluster'
systemProperty 'tests.upgrade_from_version', bwcVersion.toString().replace('-SNAPSHOT', '')
systemProperty 'tests.upgrade_from_version', oldVersion
def toBlackList = []
// Dataframe transforms were not added until 7.2.0
if (bwcVersion.before('7.2.0')) {
if (Version.fromString(oldVersion).before('7.2.0')) {
toBlackList << 'upgraded_cluster/80_transform_jobs_crud/Get start, stop, and delete old cluster batch transform'
}
// continuous Dataframe transforms were not added until 7.3.0
if (bwcVersion.before('7.3.0')) {
if (Version.fromString(oldVersion).before('7.3.0')) {
toBlackList << 'upgraded_cluster/80_transform_jobs_crud/Test GET, stop, delete, old continuous transforms'
}
// transform in mixed cluster is effectively disabled till 7.4, see gh#48019
if (bwcVersion.before('7.4.0')) {
if (Version.fromString(oldVersion).before('7.4.0')) {
toBlackList << 'upgraded_cluster/80_transform_jobs_crud/Get start, stop mixed cluster batch transform'
toBlackList << 'upgraded_cluster/80_transform_jobs_crud/Test GET, mixed continuous transforms'
}