Replace immediate task creations by using task avoidance api (#60071) (#60504)

- Replace immediate task creations by using task avoidance api
- One step closer to #56610
- Many tasks are still created during the configuration phase; these will be tackled in separate steps (see the sketch below for the eager vs. lazy pattern)
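For context, the pattern this commit applies throughout the build is Gradle's task configuration avoidance API. A minimal standalone sketch (a hypothetical build.gradle with made-up task names, not code from this commit, assuming the 'base' plugin so that a 'check' task exists): tasks.create instantiates and configures a task during every configuration phase, even if the task never runs, while tasks.register only records a TaskProvider and defers the configuration block until the task is actually required.

// Illustrative only; hypothetical tasks, 'base' plugin assumed for 'check'.
plugins {
    id 'base'
}

// Eager API: created and configured on every invocation of the build.
tasks.create('eagerHello') {
    doLast { println 'hello (eager)' }
}

// Task avoidance API: only a TaskProvider is registered here; the
// configuration block runs lazily, once the task is actually needed.
def lazyHello = tasks.register('lazyHello') {
    doLast { println 'hello (lazy)' }
}

// Wiring through the provider keeps the task lazy until execution time.
tasks.named('check').configure {
    dependsOn lazyHello
}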
Rene Groeschke 2020-07-31 13:09:04 +02:00 committed by GitHub
parent a721d6d19b
commit ed4b70190b
57 changed files with 515 additions and 481 deletions
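The per-file diffs below mostly repeat the same few mechanical rewrites. A rough sketch of the mapping, shown with a hypothetical Zip task in an ordinary build.gradle (none of these lines are taken from the commit itself; a project with a 'check' lifecycle task is assumed):

// was: task buildArchive(type: Zip) { ... }   -- eager creation and configuration
def buildArchive = tasks.register('buildArchive', Zip) {
    from 'src/docs'
    destinationDirectory = layout.buildDirectory.dir('archives')
    archiveFileName = 'docs.zip'
}

// was: check.dependsOn buildArchive   -- realizes the task object immediately
tasks.named('check').configure {
    dependsOn buildArchive
}

// was: tasks.withType(Zip) { ... }   -- configures, and therefore realizes, every Zip task
tasks.withType(Zip).configureEach {
    includeEmptyDirs = false
}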

View File

@@ -24,6 +24,7 @@ import org.elasticsearch.gradle.VersionProperties
 import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.Task
+import org.gradle.api.tasks.TaskProvider

 /**
  * Sets up tests for documentation.
@@ -42,7 +43,7 @@ class DocsTestPlugin implements Plugin<Project> {
     project.testClusters.integTest.nameCustomization = { it.replace("integTest", "node") }
     // Docs are published separately so no need to assemble
     project.tasks.assemble.enabled = false
-    Map<String, String> defaultSubstitutions = [
+    Map<String, String> commonDefaultSubstitutions = [
       /* These match up with the asciidoc syntax for substitutions but
        * the values may differ. In particular {version} needs to resolve
        * to the version being built for testing but needs to resolve to
@@ -53,26 +54,26 @@ class DocsTestPlugin implements Plugin<Project> {
       '\\{build_flavor\\}' : distribution,
       '\\{build_type\\}' : OS.conditionalString().onWindows({"zip"}).onUnix({"tar"}).supply(),
     ]
-    Task listSnippets = project.tasks.create('listSnippets', SnippetsTask)
-    listSnippets.group 'Docs'
-    listSnippets.description 'List each snippet'
-    listSnippets.defaultSubstitutions = defaultSubstitutions
-    listSnippets.perSnippet { println(it.toString()) }
-    Task listConsoleCandidates = project.tasks.create(
-      'listConsoleCandidates', SnippetsTask)
-    listConsoleCandidates.group 'Docs'
-    listConsoleCandidates.description
-      'List snippets that probably should be marked // CONSOLE'
-    listConsoleCandidates.defaultSubstitutions = defaultSubstitutions
-    listConsoleCandidates.perSnippet {
-      if (RestTestsFromSnippetsTask.isConsoleCandidate(it)) {
-        println(it.toString())
-      }
-    }
-    Task buildRestTests = project.tasks.create(
-      'buildRestTests', RestTestsFromSnippetsTask)
-    buildRestTests.defaultSubstitutions = defaultSubstitutions
+    project.tasks.register('listSnippets', SnippetsTask) {
+      group 'Docs'
+      description 'List each snippet'
+      defaultSubstitutions = commonDefaultSubstitutions
+      perSnippet { println(it.toString()) }
+    }
+    project.tasks.register('listConsoleCandidates', SnippetsTask) {
+      group 'Docs'
+      description
+        'List snippets that probably should be marked // CONSOLE'
+      defaultSubstitutions = commonDefaultSubstitutions
+      perSnippet {
+        if (RestTestsFromSnippetsTask.isConsoleCandidate(it)) {
+          println(it.toString())
+        }
+      }
+    }
+    project.tasks.register('buildRestTests', RestTestsFromSnippetsTask) {
+      defaultSubstitutions = commonDefaultSubstitutions
+    }
   }
 }

View File

@@ -67,7 +67,9 @@ class PluginBuildPlugin implements Plugin<Project> {
       createIntegTestTask(project)
       createBundleTasks(project, extension)
-      project.tasks.integTest.dependsOn(project.tasks.bundlePlugin)
+      project.tasks.named("integTest").configure {
+        it.dependsOn(project.tasks.named("bundlePlugin"))
+      }
       if (isModule) {
         project.testClusters.integTest.module(project.tasks.bundlePlugin.archiveFile)
       } else {
@@ -80,7 +82,7 @@ class PluginBuildPlugin implements Plugin<Project> {
       if (project.findProject(":modules:${pluginName}") != null) {
         project.integTest.dependsOn(project.project(":modules:${pluginName}").tasks.bundlePlugin)
         project.testClusters.integTest.module(
           project.project(":modules:${pluginName}").tasks.bundlePlugin.archiveFile
         )
       }
     }
@@ -101,15 +103,15 @@ class PluginBuildPlugin implements Plugin<Project> {
       }
       Map<String, String> properties = [
         'name'                : extension1.name,
         'description'         : extension1.description,
         'version'             : extension1.version,
         'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
         'javaVersion'         : project.targetCompatibility as String,
         'classname'           : extension1.classname,
         'extendedPlugins'     : extension1.extendedPlugins.join(','),
         'hasNativeController' : extension1.hasNativeController,
         'requiresKeystore'    : extension1.requiresKeystore
       ]
       project.tasks.named('pluginProperties').configure {
         expand(properties)
@@ -142,7 +144,7 @@ class PluginBuildPlugin implements Plugin<Project> {
         }
       }
       project.configurations.getByName('default')
         .extendsFrom(project.configurations.getByName('runtimeClasspath'))
       // allow running ES with this plugin in the foreground of a build
       project.tasks.register('run', RunTask) {
         dependsOn(project.tasks.bundlePlugin)
@@ -235,7 +237,7 @@ class PluginBuildPlugin implements Plugin<Project> {
        */
       from { project.plugins.hasPlugin(ShadowPlugin) ? project.shadowJar : project.jar }
       from project.configurations.runtimeClasspath - project.configurations.getByName(
         CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME
       )
       // extra files for the plugin to go into the zip
       from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging

View File

@@ -62,7 +62,7 @@ class StandaloneRestTestPlugin implements Plugin<Project> {
     project.pluginManager.apply(TestClustersPlugin)
     project.pluginManager.apply(RepositoriesSetupPlugin)
-    project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)
+    project.getTasks().register("buildResources", ExportElasticsearchBuildResourcesTask)
     ElasticsearchJavaPlugin.configureTestTasks(project)
     ElasticsearchJavaPlugin.configureInputNormalization(project)
     ElasticsearchJavaPlugin.configureCompile(project)

View File

@@ -114,12 +114,15 @@ public class TestClustersPlugin implements Plugin<Project> {
     }

     private void createListClustersTask(Project project, NamedDomainObjectContainer<ElasticsearchCluster> container) {
-        Task listTask = project.getTasks().create(LIST_TASK_NAME);
-        listTask.setGroup("ES cluster formation");
-        listTask.setDescription("Lists all ES clusters configured for this project");
-        listTask.doLast(
-            (Task task) -> container.forEach(cluster -> logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getNumberOfNodes()))
-        );
+        // Task is never up to date so we can pass a lambda for the task action
+        project.getTasks().register(LIST_TASK_NAME, task -> {
+            task.setGroup("ES cluster formation");
+            task.setDescription("Lists all ES clusters configured for this project");
+            task.doLast(
+                (Task t) -> container.forEach(cluster -> logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getNumberOfNodes()))
+            );
+        });
     }

     static class TestClustersHookPlugin implements Plugin<Project> {

View File

@@ -54,23 +54,23 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) {
  *****************************************************************************/

 // integ test zip only uses server, so a different notice file is needed there
-task buildServerNotice(type: NoticeTask)
+tasks.register("buildServerNotice", NoticeTask)

 // other distributions include notices from modules as well, which are added below later
-task buildDefaultNotice(type: NoticeTask) {
+tasks.register("buildDefaultNotice", NoticeTask).configure {
   licensesDir new File(project(':distribution').projectDir, 'licenses')
 }

-task buildOssNotice(type: NoticeTask) {
+tasks.register("buildOssNotice", NoticeTask).configure {
   licensesDir new File(project(':distribution').projectDir, 'licenses')
 }

-task buildDefaultNoJdkNotice(type: NoticeTask)
+tasks.register("buildDefaultNoJdkNotice", NoticeTask)

-task buildOssNoJdkNotice(type: NoticeTask)
+tasks.register("buildOssNoJdkNotice", NoticeTask)

 // The :server and :libs projects belong to all distributions
-tasks.withType(NoticeTask) {
+tasks.withType(NoticeTask).configureEach {
   licensesDir project(':server').file('licenses')
   source project(':server').file('src/main/java')
   project(':libs').subprojects.each { Project lib ->

View File

@@ -42,8 +42,9 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
   String bwcBranch = unreleasedVersion.branch
   apply plugin: 'distribution'
   // Not published so no need to assemble
-  assemble.enabled = false
+  tasks.named("assemble").configure {
+    enabled = false
+  }

   File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}")
   final String remote = System.getProperty("bwc.remote", "elastic")
@@ -58,13 +59,13 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
     throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
   }

-  task createClone(type: LoggedExec) {
+  tasks.register("createClone", LoggedExec) {
     onlyIf { checkoutDir.exists() == false }
     commandLine = ['git', 'clone', rootDir, checkoutDir]
   }

-  task findRemote(type: LoggedExec) {
-    dependsOn createClone
+  tasks.register("findRemote", LoggedExec) {
+    dependsOn "createClone"
     workingDir = checkoutDir
     commandLine = ['git', 'remote', '-v']
     ByteArrayOutputStream output = new ByteArrayOutputStream()
@@ -79,16 +80,16 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
     }
   }

-  task addRemote(type: LoggedExec) {
+  tasks.register("addRemote", LoggedExec) {
     dependsOn findRemote
     onlyIf { project.ext.remoteExists == false }
     workingDir = checkoutDir
     commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"]
   }

-  task fetchLatest(type: LoggedExec) {
+  tasks.register("fetchLatest", LoggedExec) {
     onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
-    dependsOn addRemote
+    dependsOn("addRemote")
     workingDir = checkoutDir
     commandLine = ['git', 'fetch', '--all']
   }
@@ -104,8 +105,8 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
       return os.toString().trim()
     }
   }

-  task checkoutBwcBranch() {
-    dependsOn fetchLatest
+  tasks.register("checkoutBwcBranch") {
+    dependsOn("fetchLatest")
     doLast {
       def refspec = System.getProperty("bwc.refspec." + bwcBranch) ?: System.getProperty("tests.bwc.refspec." + bwcBranch) ?: "${remote}/${bwcBranch}"
       if (System.getProperty("bwc.checkout.align") != null) {
@@ -156,9 +157,9 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
   }

-  Closure createRunBwcGradleTask = { name, extraConfig ->
+  Closure<TaskProvider> createRunBwcGradleTask = { name, extraConfig ->
     return tasks.register("$name", LoggedExec) {
-      dependsOn checkoutBwcBranch
+      dependsOn "checkoutBwcBranch"
       spoolOutput = true
       workingDir = checkoutDir
       doFirst {
@@ -304,7 +305,9 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
     Version currentVersion = Version.fromString(version)
     if (currentVersion.getMinor() == 0 && currentVersion.getRevision() == 0) {
       // We only want to resolve dependencies for live versions of master, without cascading this to older versions
-      resolveAllDependencies.dependsOn resolveAllBwcDependencies
+      tasks.named("resolveAllDependencies").configure {
+        dependsOn("resolveAllBwcDependencies")
+      }
     }

     for (e in artifactFiles) {
@@ -326,7 +329,9 @@ BuildParams.bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInf
       }
     }

     // make sure no dependencies were added to assemble; we want it to be a no-op
-    assemble.dependsOn = []
+    tasks.named("assemble").configure {
+      dependsOn = []
+    }
   }
 }

View File

@@ -109,7 +109,7 @@ project.ext {
 }

 void addCopyDockerContextTask(final String architecture, final boolean oss) {
-  task(taskName("copy", architecture, oss, "DockerContext"), type: Sync) {
+  tasks.register(taskName("copy", architecture, oss, "DockerContext"), Sync) {
     expansions(architecture, oss, true).findAll { it.key != 'build_date' }.each { k, v ->
       inputs.property(k, { v.toString() })
     }
@@ -143,7 +143,7 @@ def createAndSetWritable(Object... locations) {
   }
 }

-task copyKeystore(type: Sync) {
+tasks.register("copyKeystore", Sync) {
   from project(':x-pack:plugin:core')
     .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
   into "${buildDir}/certs"
@@ -167,9 +167,9 @@ elasticsearch_distributions {
   }
 }

-preProcessFixture {
+tasks.named("preProcessFixture").configure {
   dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss
-  dependsOn copyKeystore
+  dependsOn "copyKeystore"
   doLast {
     // tests expect to have an empty repo
     project.delete(
@@ -187,21 +187,25 @@ preProcessFixture {
   }
 }

-processTestResources {
+tasks.named("processTestResources").configure {
   from project(':x-pack:plugin:core')
     .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
 }

-task integTest(type: Test) {
+tasks.register("integTest", Test) {
   outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
   maxParallelForks = '1'
   include '**/*IT.class'
 }

-check.dependsOn integTest
+tasks.named("check").configure {
+  dependsOn "integTest"
+}

 void addBuildDockerImage(final String architecture, final boolean oss) {
-  final Task buildDockerImageTask = task(taskName("build", architecture, oss, "DockerImage"), type: DockerBuildTask) {
+  final TaskProvider<DockerBuildTask> buildDockerImageTask =
+    tasks.register(taskName("build", architecture, oss, "DockerImage"), DockerBuildTask) {
+      onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
       TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, oss, "DockerContext"))
       dependsOn(copyContextTask)
       dockerContext.fileProvider(copyContextTask.map { it.destinationDir })
@@ -220,8 +224,9 @@ void addBuildDockerImage(final String architecture, final boolean oss) {
       ]
     }
   }
-  buildDockerImageTask.onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
-  assemble.dependsOn(buildDockerImageTask)
+  tasks.named("assemble").configure {
+    dependsOn(buildDockerImageTask)
+  }
 }

 for (final String architecture : ["aarch64", "x64"]) {
@@ -253,7 +258,7 @@ subprojects { Project subProject ->
     def buildTaskName = taskName("build", architecture, oss, "DockerImage")
     def tarFile = "${parent.projectDir}/build/elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"
-    final Task exportDockerImageTask = task(exportTaskName, type: LoggedExec) {
+    final TaskProvider<LoggedExec> exportDockerImageTask = tasks.register(exportTaskName, LoggedExec) {
       inputs.file("${parent.projectDir}/build/markers/${buildTaskName}.marker")
       executable 'docker'
       outputs.file(tarFile)
@@ -261,18 +266,19 @@
         "-o",
         tarFile,
         "elasticsearch${oss ? '-oss' : ''}:test"
+      dependsOn(parent.path + ":" + buildTaskName)
+      onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
     }
-    exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName))
-    exportDockerImageTask.onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }

     artifacts.add('default', file(tarFile)) {
       type 'tar'
       name "elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}"
       builtBy exportTaskName
     }

-    assemble.dependsOn exportTaskName
+    tasks.named("assemble").configure {
+      dependsOn(exportTaskName)
+    }
   }
 }

View File

@@ -1,6 +1,6 @@
 apply plugin: 'base'

-task buildDockerBuildContext(type: Tar) {
+tasks.register("buildDockerBuildContext", Tar) {
   archiveExtension = 'tar.gz'
   compression = Compression.GZIP
   archiveClassifier = "docker-build-context"
@@ -10,4 +10,4 @@ task buildDockerBuildContext(type: Tar) {
   with dockerBuildContext("<remote>", false, false)
 }

-assemble.dependsOn buildDockerBuildContext
+tasks.named("assemble").configure { dependsOn "buildDockerBuildContext" }

View File

@@ -1,6 +1,6 @@
 apply plugin: 'base'

-task buildOssDockerBuildContext(type: Tar) {
+tasks.register("buildOssDockerBuildContext", Tar) {
   archiveExtension = 'tar.gz'
   compression = Compression.GZIP
   archiveClassifier = "docker-build-context"
@@ -10,4 +10,4 @@ task buildOssDockerBuildContext(type: Tar) {
   with dockerBuildContext("<remote>", true, false)
 }

-assemble.dependsOn buildOssDockerBuildContext
+tasks.named("assemble").configure { dependsOn "buildOssDockerBuildContext" }

View File

@@ -59,7 +59,7 @@ void addProcessFilesTask(String type, boolean oss, boolean jdk) {
   String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}"
   String taskName = "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"

-  task(taskName, type: Copy) {
+  tasks.register(taskName, Copy) {
     into packagingFiles

     with copySpec {
@@ -464,15 +464,15 @@ subprojects {
     }
   }

-  check.dependsOn checkExtraction
+  tasks.named("check").configure { dependsOn "checkExtraction" }
   if (project.name.contains('deb')) {
-    checkExtraction {
+    tasks.named("checkExtraction").configure {
       onlyIf dpkgExists
       commandLine 'dpkg-deb', '-x', "${-> buildDist.get().outputs.files.filter(debFilter).singleFile}", packageExtractionDir
     }
   } else {
     assert project.name.contains('rpm')
-    checkExtraction {
+    tasks.named("checkExtraction").configure {
       onlyIf rpmExists
       final File rpmDatabase = new File(extractionDir, 'rpm-database')
       commandLine 'rpm',
@@ -491,12 +491,12 @@ subprojects {
     }
   }

-  task checkLicense {
-    dependsOn buildDist, checkExtraction
+  tasks.register("checkLicense") {
+    dependsOn buildDist, "checkExtraction"
   }
-  check.dependsOn checkLicense
+  check.dependsOn "checkLicense"
   if (project.name.contains('deb')) {
-    checkLicense {
+    tasks.named("checkLicense").configure {
       onlyIf dpkgExists
       doLast {
         Path copyrightPath
@@ -521,7 +521,7 @@ subprojects {
     }
   } else {
     assert project.name.contains('rpm')
-    checkLicense {
+    tasks.named("checkLicense").configure {
       onlyIf rpmExists
       doLast {
         String licenseFilename
@@ -537,8 +537,8 @@ subprojects {
     }
   }

-  task checkNotice {
-    dependsOn buildDist, checkExtraction
+  tasks.register("checkNotice") {
+    dependsOn buildDist, "checkExtraction"
     onlyIf {
       (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it))
     }
@@ -548,10 +548,10 @@ subprojects {
       assertLinesInFile(noticePath, noticeLines)
     }
   }
-  check.dependsOn checkNotice
+  tasks.named("check").configure { dependsOn "checkNotice" }

   tasks.register('checkLicenseMetadata', LoggedExec) {
-    dependsOn buildDist, checkExtraction
+    dependsOn buildDist, "checkExtraction"
   }
   check.dependsOn checkLicenseMetadata

   if (project.name.contains('deb')) {

View File

@@ -16,7 +16,7 @@ buildscript {
 allprojects {
   apply plugin: 'idea'

-  tasks.named('idea') {
+  tasks.named('idea').configure {
     doFirst { throw new GradleException("Use of the 'idea' task has been deprecated. For details on importing into IntelliJ see CONTRIBUTING.md.") }
   }
 }

View File

@@ -43,40 +43,41 @@ restResources {
   }
 }

-task copyDefaultGeoIp2DatabaseFiles(type: Copy) {
+tasks.register("copyDefaultGeoIp2DatabaseFiles", Copy) {
   from { zipTree(configurations.testCompileClasspath.files.find { it.name.contains('geolite2-databases') }) }
   into "${project.buildDir}/ingest-geoip"
   include "*.mmdb"
 }

-project.bundlePlugin.dependsOn(copyDefaultGeoIp2DatabaseFiles)
-
-bundlePlugin {
+tasks.named("bundlePlugin").configure {
+  dependsOn("copyDefaultGeoIp2DatabaseFiles")
   from("${project.buildDir}/ingest-geoip") {
     into '/'
   }
 }

-thirdPartyAudit.ignoreMissingClasses(
+tasks.named("thirdPartyAudit").configure {
+  ignoreMissingClasses(
     // geoip WebServiceClient needs apache http client, but we're not using WebServiceClient:
     'org.apache.http.HttpEntity',
     'org.apache.http.HttpHost',
     'org.apache.http.HttpResponse',
     'org.apache.http.StatusLine',
     'org.apache.http.auth.UsernamePasswordCredentials',
     'org.apache.http.client.config.RequestConfig$Builder',
     'org.apache.http.client.config.RequestConfig',
     'org.apache.http.client.methods.CloseableHttpResponse',
     'org.apache.http.client.methods.HttpGet',
     'org.apache.http.client.utils.URIBuilder',
     'org.apache.http.impl.auth.BasicScheme',
     'org.apache.http.impl.client.CloseableHttpClient',
     'org.apache.http.impl.client.HttpClientBuilder',
     'org.apache.http.util.EntityUtils'
-)
+  )
+}

-test {
-  if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+  tasks.named("test").configure {
     // Windows cannot cleanup database files properly unless it loads everything on heap.
     // See https://github.com/maxmind/MaxMind-DB-Reader-java#file-lock-on-windows for more information
     systemProperty 'es.geoip.load_db_on_heap', 'true'

View File

@@ -56,27 +56,28 @@ restResources {

 integTest.enabled = false

-test {
+tasks.named("test").configure {
   // in WhenThingsGoWrongTests we intentionally generate an out of memory error, this prevents the heap from being dumped to disk
   jvmArgs '-XX:-OmitStackTraceInFastThrow', '-XX:-HeapDumpOnOutOfMemoryError'
 }

 /* Build Javadoc for the Java classes in Painless's public API that are in the
  * Painless plugin */
-task apiJavadoc(type: Javadoc) {
+tasks.register("apiJavadoc", Javadoc) {
   source = sourceSets.main.allJava
   classpath = sourceSets.main.runtimeClasspath
   include '**/org/elasticsearch/painless/api/'
   destinationDir = new File(docsDir, 'apiJavadoc')
 }

-task apiJavadocJar(type: Jar) {
+tasks.register("apiJavadocJar", Jar) {
   archiveClassifier = 'apiJavadoc'
   from apiJavadoc
 }

-assemble.dependsOn apiJavadocJar
+tasks.named("assemble").configure {
+  dependsOn "apiJavadocJar"
+}

 /**********************************************
  *           Context API Generation           *
  **********************************************/
@@ -96,7 +97,7 @@ testClusters {
   }
 }

-task generateContextDoc(type: DefaultTestClustersTask) {
+tasks.register("generateContextDoc", DefaultTestClustersTask) {
   dependsOn sourceSets.doc.runtimeClasspath
   useCluster testClusters.generateContextCluster
   doFirst {
@@ -123,7 +124,7 @@ dependencies {
 String grammarPath = 'src/main/antlr'
 String outputPath = 'src/main/java/org/elasticsearch/painless/antlr'

-task cleanGenerated(type: Delete) {
+tasks.register("cleanGenerated", Delete) {
   delete fileTree(grammarPath) {
     include '*.tokens'
   }
@@ -132,8 +133,8 @@ task cleanGenerated(type: Delete) {
   }
 }

-task regenLexer(type: JavaExec) {
-  dependsOn cleanGenerated
+tasks.register("regenLexer", JavaExec) {
+  dependsOn "cleanGenerated"
   main = 'org.antlr.v4.Tool'
   classpath = configurations.regenerate
   systemProperty 'file.encoding', 'UTF-8'
@@ -146,8 +147,8 @@ task regenLexer(type: JavaExec) {
     "${file(grammarPath)}/PainlessLexer.g4"
 }

-task regenParser(type: JavaExec) {
-  dependsOn regenLexer
+tasks.register("regenParser", JavaExec) {
+  dependsOn "regenLexer"
   main = 'org.antlr.v4.Tool'
   classpath = configurations.regenerate
   systemProperty 'file.encoding', 'UTF-8'
@@ -163,8 +164,8 @@ task regenParser(type: JavaExec) {
     "${file(grammarPath)}/PainlessParser.g4"
 }

-task regen {
-  dependsOn regenParser
+tasks.register("regen") {
+  dependsOn "regenParser"
   doLast {
     // moves token files to grammar directory for use with IDE's
     ant.move(file: "${outputPath}/PainlessLexer.tokens", toDir: grammarPath)

View File

@@ -72,7 +72,7 @@ String host = InetAddress.getLoopbackAddress().getHostAddress()
 File keystore = new File(project.buildDir, 'keystore/test-node.jks')

 // generate the keystore
-task createKey(type: LoggedExec) {
+TaskProvider createKey = tasks.register("createKey", LoggedExec) {
   doFirst {
     project.delete(keystore.parentFile)
     keystore.parentFile.mkdirs()
@@ -91,9 +91,9 @@ task createKey(type: LoggedExec) {
     '-storepass', 'keypass'
 }

 //no unit tests
-test.enabled = false
+tasks.named("test").configure { enabled = false }

 // add keystore to test classpath: it expects it there
-processInternalClusterTestResources {
+tasks.named("processInternalClusterTestResources").configure {
   from createKey
 }
@@ -110,7 +110,9 @@ tasks.named("dependencyLicenses").configure {
   mapping from: /jaxb-.*/, to: 'jaxb'
 }

-thirdPartyAudit.ignoreMissingClasses(
+tasks.named("thirdPartyAudit").configure {
+  ignoreMissingClasses(
     'javax.servlet.ServletContextEvent',
     'javax.servlet.ServletContextListener',
     'org.apache.avalon.framework.logger.Logger',
@@ -147,7 +149,7 @@ thirdPartyAudit.ignoreMissingClasses(

   // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9)
   if (BuildParams.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
-    thirdPartyAudit.ignoreJarHellWithJDK(
+    ignoreJarHellWithJDK(
       'javax.xml.bind.Binder',
       'javax.xml.bind.ContextFinder$1',
       'javax.xml.bind.ContextFinder',
@@ -251,14 +253,15 @@ if (BuildParams.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
       'javax.xml.bind.util.ValidationEventCollector'
     )
   } else {
-    thirdPartyAudit.ignoreMissingClasses(
+    ignoreMissingClasses(
       'javax.activation.ActivationDataFlavor',
       'javax.activation.DataContentHandler',
       'javax.activation.DataHandler',
       'javax.activation.DataSource',
       'javax.activation.FileDataSource',
       'javax.activation.FileTypeMap',
       'javax.activation.MimeType',
       'javax.activation.MimeTypeParseException',
     )
   }
+}

View File

@@ -53,13 +53,13 @@ tasks.named("dependencyLicenses").configure {
   mapping from: /jackson-.*/, to: 'jackson'
 }

-bundlePlugin {
+tasks.named("bundlePlugin").configure {
   from('config/discovery-ec2') {
     into 'config'
   }
 }

-task writeTestJavaPolicy {
+tasks.register("writeTestJavaPolicy") {
   doLast {
     final File tmp = file("${buildDir}/tmp")
     if (tmp.exists() == false && tmp.mkdirs() == false) {
@@ -97,8 +97,8 @@ task writeTestJavaPolicy {
   }
 }

-test {
-  dependsOn writeTestJavaPolicy
+tasks.named("test").configure {
+  dependsOn "writeTestJavaPolicy"

   // this is needed for insecure plugins, remove if possible!
   systemProperty 'tests.artifact', project.name
@@ -113,43 +113,45 @@ test {
   }
 }

-check {
+tasks.named("check").configure {
   // also execute the QA tests when testing the plugin
   dependsOn 'qa:amazon-ec2:check'
 }

-thirdPartyAudit.ignoreMissingClasses(
+tasks.named("thirdPartyAudit").configure {
+  ignoreMissingClasses(
     // classes are missing
     'com.amazonaws.jmespath.JmesPathEvaluationVisitor',
     'com.amazonaws.jmespath.JmesPathExpression',
     'com.amazonaws.jmespath.JmesPathField',
     'com.amazonaws.jmespath.JmesPathFlatten',
     'com.amazonaws.jmespath.JmesPathIdentity',
     'com.amazonaws.jmespath.JmesPathLengthFunction',
     'com.amazonaws.jmespath.JmesPathLiteral',
     'com.amazonaws.jmespath.JmesPathProjection',
     'com.amazonaws.jmespath.JmesPathSubExpression',
     'com.amazonaws.jmespath.ObjectMapperSingleton',
     'com.amazonaws.jmespath.OpGreaterThan',
     'software.amazon.ion.IonReader',
     'software.amazon.ion.IonSystem',
     'software.amazon.ion.IonType',
     'software.amazon.ion.IonWriter',
     'software.amazon.ion.Timestamp',
     'software.amazon.ion.system.IonBinaryWriterBuilder',
     'software.amazon.ion.system.IonSystemBuilder',
     'software.amazon.ion.system.IonTextWriterBuilder',
     'software.amazon.ion.system.IonWriterBuilder',
     'javax.servlet.ServletContextEvent',
     'javax.servlet.ServletContextListener',
     'org.apache.avalon.framework.logger.Logger',
     'org.apache.log.Hierarchy',
     'org.apache.log.Logger'
-)
+  )

-if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
-  thirdPartyAudit.ignoreMissingClasses(
-    'javax.xml.bind.DatatypeConverter',
-    'javax.xml.bind.JAXBContext'
-  )
+  if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
+    ignoreMissingClasses(
+      'javax.xml.bind.DatatypeConverter',
+      'javax.xml.bind.JAXBContext'
+    )
+  }
 }

View File

@@ -50,7 +50,7 @@ Map<String, Object> expansions = [
   'expected_nodes': gceNumberOfNodes
 ]

-processYamlRestTestResources {
+tasks.named("processYamlRestTestResources").configure {
   inputs.properties(expansions)
   MavenFilteringHack.filter(it, expansions)
 }

View File

@@ -283,7 +283,7 @@ if (useFixture) {
   }

   // 3rd Party Tests
-  task s3ThirdPartyTest(type: Test) {
+  TaskProvider s3ThirdPartyTest = tasks.register("s3ThirdPartyTest", Test) {
     SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
     SourceSet internalTestSourceSet = sourceSets.getByName(InternalClusterTestPlugin.SOURCE_SET_NAME)
     setTestClassesDirs(internalTestSourceSet.getOutput().getClassesDirs())
@@ -297,7 +297,7 @@ task s3ThirdPartyTest(type: Test) {
     nonInputProperties.systemProperty 'test.s3.endpoint', "${-> fixtureAddress('minio-fixture', 'minio-fixture', '9000') }"
   }
 }
-check.dependsOn(s3ThirdPartyTest)
+tasks.named("check").configure { dependsOn(s3ThirdPartyTest) }

 thirdPartyAudit.ignoreMissingClasses(
   // classes are missing

View File

@@ -78,7 +78,7 @@ configurations {
   testArtifacts.extendsFrom testImplementation
 }

-task testJar(type: Jar) {
+TaskProvider testJar = tasks.register("testJar", Jar) {
   appendix 'test'
   from sourceSets.test.output
 }

View File

@@ -53,11 +53,10 @@ testClusters.mixedClusterTest {
   setting 'cluster.remote.connections_per_cluster', '1'
 }

-task integTest {
-  dependsOn mixedClusterTest
+tasks.register("integTest") {
+  dependsOn "mixedClusterTest"
 }

-test.enabled = false // no unit tests for multi-cluster-search, only integration tests
-check.dependsOn(integTest)
+tasks.named("test").configure { enabled = false } // no unit tests for multi-cluster-search, only integration tests
+tasks.named("check").configure { dependsOn("integTest") }

View File

@@ -31,7 +31,7 @@ dependencies {
   testImplementation project(':client:rest-high-level')
 }

-task copyKeystore(type: Sync) {
+tasks.register("copyKeystore", Sync) {
   from project(':x-pack:plugin:core')
     .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
   into "${buildDir}/certs"
@@ -52,7 +52,7 @@ elasticsearch_distributions {
 }

 preProcessFixture {
-  dependsOn copyKeystore, elasticsearch_distributions.docker
+  dependsOn "copyKeystore", elasticsearch_distributions.docker
   doLast {
     // tests expect to have an empty repo
     project.delete(
@@ -87,15 +87,15 @@ def createAndSetWritable(Object... locations) {
   }
 }

-processTestResources {
+tasks.named("processTestResources").configure {
   from project(':x-pack:plugin:core')
     .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
 }

-task integTest(type: Test) {
+tasks.register("integTest", Test) {
   outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
   maxParallelForks = '1'
   include '**/*IT.class'
 }

-check.dependsOn integTest
+tasks.named("check").configure { dependsOn "integTest" }

View File

@@ -92,7 +92,7 @@ configurations {
   testArtifacts.extendsFrom testImplementation
 }

-task testJar(type: Jar) {
+def testJar = tasks.register("testJar", Jar) {
   appendix 'test'
   from sourceSets.test.output
 }

View File

@@ -75,7 +75,7 @@ dockerCompose {
   }
 }

-task integTest(type: Test) {
+tasks.register("integTest", Test) {
   outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
   maxParallelForks = '1'
   include '**/*IT.class'

View File

@@ -132,9 +132,9 @@ dependencies {
   }
 }

-compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
-compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
-compileInternalClusterTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
+tasks.withType(JavaCompile).configureEach {
+  options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
+}

 // Until this project is always being formatted with spotless, we need to
 // guard against `spotless()` not existing.
@@ -152,187 +152,186 @@ catch (Exception e) {
   }
 }

-forbiddenPatterns {
+tasks.named("forbiddenPatterns").configure {
   exclude '**/*.json'
   exclude '**/*.jmx'
   exclude '**/*.dic'
   exclude '**/*.binary'
   exclude '**/*.st'
 }

-testingConventions {
+tasks.named("testingConventions").configure {
   naming.clear()
   naming {
     Tests {
       baseClass "org.apache.lucene.util.LuceneTestCase"
     }
     IT {
       baseClass "org.elasticsearch.test.ESIntegTestCase"
       baseClass "org.elasticsearch.test.ESSingleNodeTestCase"
     }
   }
 }

-task generateModulesList {
+def generateModulesList = tasks.register("generateModulesList") {
   List<String> modules = project(':modules').subprojects.collect { it.name }
   modules.add('x-pack')
   File modulesFile = new File(buildDir, 'generated-resources/modules.txt')
   processResources.from(modulesFile)
   inputs.property('modules', modules)
   outputs.file(modulesFile)
   doLast {
     modulesFile.parentFile.mkdirs()
     modulesFile.setText(modules.join('\n'), 'UTF-8')
   }
 }

-task generatePluginsList {
+def generatePluginsList = tasks.register("generatePluginsList") {
   Set<String> plugins = new TreeSet<>(project(':plugins').childProjects.keySet())
   plugins.remove('example')
   File pluginsFile = new File(buildDir, 'generated-resources/plugins.txt')
   processResources.from(pluginsFile)
   inputs.property('plugins', plugins)
   outputs.file(pluginsFile)
   doLast {
     pluginsFile.parentFile.mkdirs()
     pluginsFile.setText(plugins.join('\n'), 'UTF-8')
   }
 }

-processResources {
+tasks.named("processResources").configure {
   dependsOn generateModulesList, generatePluginsList
 }

-thirdPartyAudit.ignoreMissingClasses(
+tasks.named("thirdPartyAudit").configure {
+  ignoreMissingClasses(
     // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
     'com.fasterxml.jackson.databind.ObjectMapper',
     // from log4j
     'com.conversantmedia.util.concurrent.DisruptorBlockingQueue',
     'com.conversantmedia.util.concurrent.SpinPolicy',
     'com.fasterxml.jackson.annotation.JsonInclude$Include',
     'com.fasterxml.jackson.databind.DeserializationContext',
     'com.fasterxml.jackson.databind.DeserializationFeature',
     'com.fasterxml.jackson.databind.JsonMappingException',
     'com.fasterxml.jackson.databind.JsonNode',
     'com.fasterxml.jackson.databind.Module$SetupContext',
     'com.fasterxml.jackson.databind.ObjectReader',
     'com.fasterxml.jackson.databind.ObjectWriter',
     'com.fasterxml.jackson.databind.SerializerProvider',
     'com.fasterxml.jackson.databind.deser.std.StdDeserializer',
     'com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer',
     'com.fasterxml.jackson.databind.module.SimpleModule',
     'com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter',
     'com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider',
     'com.fasterxml.jackson.databind.ser.std.StdScalarSerializer',
     'com.fasterxml.jackson.databind.ser.std.StdSerializer',
     'com.fasterxml.jackson.dataformat.xml.JacksonXmlModule',
     'com.fasterxml.jackson.dataformat.xml.XmlMapper',
     'com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter',
     'com.fasterxml.jackson.databind.node.ObjectNode',
     'org.fusesource.jansi.Ansi',
     'org.fusesource.jansi.AnsiRenderer$Code',
     'com.lmax.disruptor.BlockingWaitStrategy',
     'com.lmax.disruptor.BusySpinWaitStrategy',
     'com.lmax.disruptor.EventFactory',
     'com.lmax.disruptor.EventTranslator',
     'com.lmax.disruptor.EventTranslatorTwoArg',
     'com.lmax.disruptor.EventTranslatorVararg',
     'com.lmax.disruptor.ExceptionHandler',
     'com.lmax.disruptor.LifecycleAware',
     'com.lmax.disruptor.RingBuffer',
     'com.lmax.disruptor.Sequence',
     'com.lmax.disruptor.SequenceReportingEventHandler',
     'com.lmax.disruptor.SleepingWaitStrategy',
     'com.lmax.disruptor.TimeoutBlockingWaitStrategy',
     'com.lmax.disruptor.WaitStrategy',
     'com.lmax.disruptor.YieldingWaitStrategy',
     'com.lmax.disruptor.dsl.Disruptor',
     'com.lmax.disruptor.dsl.ProducerType',
     'javax.jms.Connection',
     'javax.jms.ConnectionFactory',
     'javax.jms.Destination',
     'javax.jms.JMSException',
     'javax.jms.MapMessage',
     'javax.jms.Message',
     'javax.jms.MessageConsumer',
     'javax.jms.MessageProducer',
     'javax.jms.Session',
     'javax.mail.Authenticator',
     'javax.mail.Message$RecipientType',
     'javax.mail.PasswordAuthentication',
     'javax.mail.Session',
     'javax.mail.Transport',
     'javax.mail.internet.InternetAddress',
     'javax.mail.internet.InternetHeaders',
     'javax.mail.internet.MimeBodyPart',
     'javax.mail.internet.MimeMessage',
     'javax.mail.internet.MimeMultipart',
     'javax.mail.internet.MimeUtility',
     'javax.mail.util.ByteArrayDataSource',
     'org.apache.commons.compress.compressors.CompressorStreamFactory',
     'org.apache.commons.compress.utils.IOUtils',
     'org.apache.commons.csv.CSVFormat',
     'org.apache.commons.csv.QuoteMode',
     'org.apache.kafka.clients.producer.Callback',
     'org.apache.kafka.clients.producer.KafkaProducer',
     'org.apache.kafka.clients.producer.Producer',
     'org.apache.kafka.clients.producer.ProducerRecord',
     'org.apache.kafka.clients.producer.RecordMetadata',
     'org.codehaus.stax2.XMLStreamWriter2',
     'org.jctools.queues.MessagePassingQueue$Consumer',
     'org.jctools.queues.MpscArrayQueue',
     'org.osgi.framework.AdaptPermission',
     'org.osgi.framework.AdminPermission',
     'org.osgi.framework.Bundle',
     'org.osgi.framework.BundleActivator',
     'org.osgi.framework.BundleContext',
     'org.osgi.framework.BundleEvent',
     'org.osgi.framework.BundleReference',
     'org.osgi.framework.FrameworkUtil',
     'org.osgi.framework.ServiceRegistration',
     'org.osgi.framework.SynchronousBundleListener',
     'org.osgi.framework.wiring.BundleWire',
     'org.osgi.framework.wiring.BundleWiring',
     'org.zeromq.ZMQ$Context',
     'org.zeromq.ZMQ$Socket',
     'org.zeromq.ZMQ',
     // from org.locationtech.spatial4j.io.GeoJSONReader (spatial4j)
     'org.noggit.JSONParser',
     // from lucene-spatial
     'com.fasterxml.jackson.databind.JsonSerializer',
     'com.fasterxml.jackson.databind.JsonDeserializer',
     'com.fasterxml.jackson.databind.node.ArrayNode',
     'com.google.common.geometry.S2Cell',
     'com.google.common.geometry.S2CellId',
     'com.google.common.geometry.S2Projections',
     'com.google.common.geometry.S2Point',
     'com.google.common.geometry.S2$Metric',
     'com.google.common.geometry.S2LatLng'
-)
+  )

   if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
-  thirdPartyAudit.ignoreMissingClasses 'javax.xml.bind.DatatypeConverter'
+    ignoreMissingClasses 'javax.xml.bind.DatatypeConverter'
   }
+}

 tasks.named("dependencyLicenses").configure {
   mapping from: /lucene-.*/, to: 'lucene'
   dependencies = project.configurations.runtimeClasspath.fileCollection {
     it.group.startsWith('org.elasticsearch') == false ||
       // keep the following org.elasticsearch jars in
       (it.name == 'jna' ||
         it.name == 'securesm')
   }
 }

-licenseHeaders {
+tasks.named("licenseHeaders").configure {
   // Ignore our vendored version of Google Guice
   excludes << 'org/elasticsearch/common/inject/**/*'
-}
-
-licenseHeaders {
   excludes << 'org/elasticsearch/client/documentation/placeholder.txt'
 }

View File

@ -66,7 +66,7 @@ test {
systemProperty 'tests.gradle_unreleased_versions', BuildParams.bwcVersions.unreleased.join(',') systemProperty 'tests.gradle_unreleased_versions', BuildParams.bwcVersions.unreleased.join(',')
} }
task integTest(type: Test) { tasks.register("integTest", Test) {
include "**/*IT.class" include "**/*IT.class"
} }
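A minimal sketch of the registration pattern used in the hunk above (the task name and command line are illustrative, not from this commit): tasks.register defers creating and configuring the task until something actually requires it, whereas the old task foo(type: ...) syntax realizes it during every configuration pass.
tasks.register('verifyChecksums', Exec) {
    // this block only runs if the task is realized, e.g. requested on the command line
    commandLine 'sha1sum', '-c', 'checksums.txt'
}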

View File

@ -25,22 +25,27 @@ dependencies {
testImplementation project(":test:framework") testImplementation project(":test:framework")
} }
loggerUsageCheck.enabled = false tasks.named("loggerUsageCheck").configure {
enabled = false
}
tasks.named('forbiddenApisMain').configure { tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures' // does not depend on core, only jdk signatures replaceSignatureFiles 'jdk-signatures' // does not depend on core, only jdk signatures
} }
jarHell.enabled = true // disabled by parent project jarHell.enabled = true // disabled by parent project
thirdPartyAudit.ignoreMissingClasses( tasks.named("thirdPartyAudit").configure {
// log4j ignoreMissingClasses(
'org.osgi.framework.AdaptPermission', // log4j
'org.osgi.framework.AdminPermission', 'org.osgi.framework.AdaptPermission',
'org.osgi.framework.Bundle', 'org.osgi.framework.AdminPermission',
'org.osgi.framework.BundleActivator', 'org.osgi.framework.Bundle',
'org.osgi.framework.BundleContext', 'org.osgi.framework.BundleActivator',
'org.osgi.framework.BundleEvent', 'org.osgi.framework.BundleContext',
'org.osgi.framework.SynchronousBundleListener', 'org.osgi.framework.BundleEvent',
'org.osgi.framework.wiring.BundleWire', 'org.osgi.framework.SynchronousBundleListener',
'org.osgi.framework.wiring.BundleWiring' 'org.osgi.framework.wiring.BundleWire',
) 'org.osgi.framework.wiring.BundleWiring'
)
}
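A minimal sketch of the tasks.named(...).configure { ... } pattern applied above, assuming a thirdPartyAudit task has already been registered by a plugin (the ignored class name is a placeholder): the lookup returns a TaskProvider, and the configuration block only runs if the task is realized.
tasks.named('thirdPartyAudit').configure {
    // hypothetical optional dependency that is deliberately absent from the classpath
    ignoreMissingClasses 'com.example.OptionalDependency'
}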

View File

@ -31,7 +31,7 @@ subprojects {
project.esplugin.noticeFile = xpackRootProject.file('NOTICE.txt') project.esplugin.noticeFile = xpackRootProject.file('NOTICE.txt')
} }
tasks.withType(LicenseHeadersTask.class) { tasks.withType(LicenseHeadersTask.class).configureEach {
approvedLicenses = ['Elastic License', 'Generated', 'Vendored'] approvedLicenses = ['Elastic License', 'Generated', 'Vendored']
additionalLicense 'ELAST', 'Elastic License', 'Licensed under the Elastic License' additionalLicense 'ELAST', 'Elastic License', 'Licensed under the Elastic License'
} }

View File

@ -12,7 +12,8 @@ project.forbiddenPatterns {
tasks.named("dependencyLicenses").configure { it.enabled = false } tasks.named("dependencyLicenses").configure { it.enabled = false }
task buildZip(type: Zip, dependsOn: jar) { tasks.register("buildZip", Zip) {
dependsOn "jar"
String parentDir = "license-tools-${archiveVersion}" String parentDir = "license-tools-${archiveVersion}"
into(parentDir + '/lib') { into(parentDir + '/lib') {
from jar from jar
@ -23,4 +24,4 @@ task buildZip(type: Zip, dependsOn: jar) {
} }
} }
assemble.dependsOn buildZip tasks.named("assemble").configure { dependsOn("buildZip") }
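A minimal sketch of wiring lifecycle dependencies without eager task creation, using hypothetical names: dependsOn accepts either the TaskProvider returned by tasks.register or a plain task name, so neither side of the relationship is realized at configuration time.
def buildZip = tasks.register('buildZip', Zip) {
    destinationDirectory = layout.buildDirectory.dir('distributions')
    archiveFileName = 'tools.zip'
    from 'build/libs'
}
tasks.named('assemble').configure {
    dependsOn buildZip // a TaskProvider works; the string 'buildZip' would as well
}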

View File

@ -75,7 +75,7 @@ artifacts {
restXpackTests(new File(projectDir, "src/test/resources/rest-api-spec/test")) restXpackTests(new File(projectDir, "src/test/resources/rest-api-spec/test"))
} }
task testJar(type: Jar) { def testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
/* /*
@ -107,7 +107,7 @@ File nodeCert = file("$keystoreDir/testnode.crt")
// it can run in a FIPS 140 JVM // it can run in a FIPS 140 JVM
// TODO: Remove all existing uses of cross project file references when the new approach for referencing static files is available // TODO: Remove all existing uses of cross project file references when the new approach for referencing static files is available
// https://github.com/elastic/elasticsearch/pull/32201 // https://github.com/elastic/elasticsearch/pull/32201
task copyKeyCerts(type: Copy) { tasks.register("copyKeyCerts", Copy) {
from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) { from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) {
include 'testnode.crt', 'testnode.pem' include 'testnode.crt', 'testnode.pem'
} }
@ -115,7 +115,7 @@ task copyKeyCerts(type: Copy) {
} }
// Add keystores to test classpath: it expects it there // Add keystores to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir) sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(copyKeyCerts) processTestResources.dependsOn("copyKeyCerts")
integTest.runner { integTest.runner {
/* /*

View File

@ -22,7 +22,7 @@ testClusters."leader-cluster" {
} }
File policyFile = file("${buildDir}/tmp/java.policy") File policyFile = file("${buildDir}/tmp/java.policy")
task writeJavaPolicy { tasks.register("writeJavaPolicy") {
doLast { doLast {
if (policyFile.parentFile.exists() == false && policyFile.parentFile.mkdirs() == false) { if (policyFile.parentFile.exists() == false && policyFile.parentFile.mkdirs() == false) {
throw new GradleException("failed to create temporary directory [${tmp}]") throw new GradleException("failed to create temporary directory [${tmp}]")
@ -82,5 +82,6 @@ testClusters."follow-cluster" {
} }
check.dependsOn "follow-cluster" tasks.named("check").configure { dependsOn "follow-cluster" }
test.enabled = false // no unit tests for multi-cluster-search, only the rest integration test // no unit tests for multi-cluster-search, only the rest integration test
tasks.named("test").configure { enabled = false }

View File

@ -9,7 +9,7 @@ dependencies {
testImplementation project(':x-pack:plugin:ccr:qa') testImplementation project(':x-pack:plugin:ccr:qa')
} }
task resolve { tasks.register("resolve") {
doLast { doLast {
configurations.testCompileClasspath.files.each { configurations.testCompileClasspath.files.each {
println it println it

View File

@ -125,7 +125,7 @@ configurations {
testArtifacts.extendsFrom testRuntime testArtifacts.extendsFrom testRuntime
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { def testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }
@ -136,15 +136,17 @@ artifacts {
testArtifacts testJar testArtifacts testJar
} }
thirdPartyAudit.ignoreMissingClasses( tasks.named("thirdPartyAudit").configure {
//commons-logging optional dependencies ignoreMissingClasses(
'org.apache.avalon.framework.logger.Logger', //commons-logging optional dependencies
'org.apache.log.Hierarchy', 'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Logger', 'org.apache.log.Hierarchy',
//commons-logging provided dependencies 'org.apache.log.Logger',
'javax.servlet.ServletContextEvent', //commons-logging provided dependencies
'javax.servlet.ServletContextListener' 'javax.servlet.ServletContextEvent',
) 'javax.servlet.ServletContextListener'
)
}
restResources { restResources {
restApi { restApi {
@ -160,4 +162,4 @@ testClusters.yamlRestTest {
user username: "x_pack_rest_user", password: "x-pack-test-password" user username: "x_pack_rest_user", password: "x-pack-test-password"
} }
testingConventions.enabled = false tasks.named("testingConventions").configure { enabled = false }

View File

@ -73,7 +73,7 @@ dependencies {
String grammarPath = 'src/main/antlr' String grammarPath = 'src/main/antlr'
String outputPath = 'src/main/java/org/elasticsearch/xpack/eql/parser' String outputPath = 'src/main/java/org/elasticsearch/xpack/eql/parser'
task cleanGenerated(type: Delete) { tasks.register("cleanGenerated", Delete) {
delete fileTree(grammarPath) { delete fileTree(grammarPath) {
include '*.tokens' include '*.tokens'
} }
@ -82,8 +82,8 @@ task cleanGenerated(type: Delete) {
} }
} }
task regenParser(type: JavaExec) { tasks.register("regenParser", JavaExec) {
dependsOn cleanGenerated dependsOn "cleanGenerated"
main = 'org.antlr.v4.Tool' main = 'org.antlr.v4.Tool'
classpath = configurations.regenerate classpath = configurations.regenerate
systemProperty 'file.encoding', 'UTF-8' systemProperty 'file.encoding', 'UTF-8'
@ -98,8 +98,8 @@ task regenParser(type: JavaExec) {
"${file(grammarPath)}/EqlBase.g4" "${file(grammarPath)}/EqlBase.g4"
} }
task regen { tasks.register("regen") {
dependsOn regenParser dependsOn "regenParser"
doLast { doLast {
// moves token files to grammar directory for use with IDE's // moves token files to grammar directory for use with IDE's
ant.move(file: "${outputPath}/EqlBase.tokens", toDir: grammarPath) ant.move(file: "${outputPath}/EqlBase.tokens", toDir: grammarPath)

View File

@ -69,17 +69,17 @@ configurations {
testArtifacts.extendsFrom testRuntime testArtifacts.extendsFrom testRuntime
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { TaskProvider testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }
artifacts { artifacts {
// normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions
archives jar archives tasks.named('jar')
testArtifacts testJar testArtifacts testJar
} }
task extractNativeLicenses(type: Copy) { tasks.register("extractNativeLicenses", Copy) {
dependsOn configurations.nativeBundle dependsOn configurations.nativeBundle
into "${buildDir}" into "${buildDir}"
from { from {
@ -89,23 +89,28 @@ task extractNativeLicenses(type: Copy) {
} }
project.afterEvaluate { project.afterEvaluate {
// Add an extra licenses directory to the combined notices // Add an extra licenses directory to the combined notices
project.tasks.findByName('generateNotice').dependsOn extractNativeLicenses tasks.named('generateNotice').configure {
project.tasks.findByName('generateNotice').licensesDir new File("${project.buildDir}/platform/licenses") dependsOn "extractNativeLicenses"
project.tasks.findByName('generateNotice').outputs.upToDateWhen { licensesDir new File("${project.buildDir}/platform/licenses")
extractNativeLicenses.state.upToDate outputs.upToDateWhen {
extractNativeLicenses.didWork
}
} }
} }
// xpack modules are installed in real clusters as the meta plugin, so // xpack modules are installed in real clusters as the meta plugin, so
// installing them as individual plugins for integ tests doesn't make sense, // installing them as individual plugins for integ tests doesn't make sense,
// so we disable integ tests // so we disable integ tests
integTest.enabled = false tasks.named("integTest").configure {
enabled = false
}
def checkTask = tasks.named("check")
// add all sub-projects of the qa sub-project // add all sub-projects of the qa sub-project
gradle.projectsEvaluated { gradle.projectsEvaluated {
project.subprojects project.subprojects
.find { it.path == project.path + ":qa" } .find { it.path == project.path + ":qa" }
.subprojects .subprojects
.findAll { it.path.startsWith(project.path + ":qa") } .findAll { it.path.startsWith(project.path + ":qa") }
.each { check.dependsOn it.check } .each { subProj -> checkTask.configure { dependsOn subProj.tasks.named("check") } }
} }
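A minimal sketch of holding on to a TaskProvider and attaching dependencies later, as the hunk above does for check (the subproject path is illustrative): the provider can be configured any number of times before the task is realized.
def checkTask = tasks.named('check')
gradle.projectsEvaluated {
    checkTask.configure {
        dependsOn ':some-qa-subproject:check'
    }
}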

View File

@ -16,7 +16,7 @@ File keystoreDir = new File(project.buildDir, 'keystore')
File nodeKey = file("$keystoreDir/testnode.pem") File nodeKey = file("$keystoreDir/testnode.pem")
File nodeCert = file("$keystoreDir/testnode.crt") File nodeCert = file("$keystoreDir/testnode.crt")
// Add key and certs to test classpath: it expects it there // Add key and certs to test classpath: it expects it there
task copyKeyCerts(type: Copy) { tasks.register("copyKeyCerts", Copy) {
from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) { from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) {
include 'testnode.crt', 'testnode.pem' include 'testnode.crt', 'testnode.pem'
} }
@ -24,10 +24,10 @@ task copyKeyCerts(type: Copy) {
} }
// Add keys and certs to test classpath: it expects it there // Add keys and certs to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir) sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(copyKeyCerts) tasks.named("processTestResources").configure { dependsOn("copyKeyCerts") }
integTest { integTest {
dependsOn copyKeyCerts dependsOn "copyKeyCerts"
runner { runner {
/* /*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each

View File

@ -28,13 +28,13 @@ configurations {
testArtifacts.extendsFrom testRuntime testArtifacts.extendsFrom testRuntime
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { TaskProvider testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }
artifacts { artifacts {
// normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions
archives jar archives tasks.named("jar")
testArtifacts testJar testArtifacts testJar
} }
@ -46,4 +46,4 @@ tasks.named("dependencyLicenses").configure {
// xpack modules are installed in real clusters as the meta plugin, so // xpack modules are installed in real clusters as the meta plugin, so
// installing them as individual plugins for integ tests doesn't make sense, // installing them as individual plugins for integ tests doesn't make sense,
// so we disable integ tests // so we disable integ tests
integTest.enabled = false tasks.named("integTest").configure { enabled = false }
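A minimal sketch of publishing the outputs of lazily registered tasks, assuming the java plugin and a custom testArtifacts configuration: the artifacts block accepts TaskProvider arguments, so neither jar nor testJar is realized just by being published.
configurations {
    testArtifacts
}
def testJar = tasks.register('testJar', Jar) {
    archiveClassifier = 'test'
    from sourceSets.test.output
}
artifacts {
    archives tasks.named('jar')
    testArtifacts testJar
}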

View File

@ -21,17 +21,18 @@ configurations {
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { TaskProvider testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }
artifacts { artifacts {
// normal es plugins do not publish the jar but we need to since users need it for extensions // normal es plugins do not publish the jar but we need to since users need it for extensions
archives jar archives tasks.named("jar")
testArtifacts testJar testArtifacts testJar
} }
// disable integration tests for now // disable integration tests for now
integTest.enabled = false tasks.named("integTest").configure {
enabled = false
}

View File

@ -35,7 +35,7 @@ configurations {
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { def testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }

View File

@ -101,7 +101,7 @@ testClusters.integTest {
} }
} }
task azureThirdPartyTest { tasks.register("azureThirdPartyTest") {
dependsOn integTest dependsOn "integTest"
} }

View File

@ -63,7 +63,7 @@ if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) {
} }
/** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ /** A service account file that points to the Google Cloud Storage service emulated by the fixture **/
task createServiceAccountFile() { tasks.register("createServiceAccountFile") {
doLast { doLast {
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA")
keyPairGenerator.initialize(1024) keyPairGenerator.initialize(1024)
@ -133,6 +133,6 @@ testClusters.integTest {
setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.license.self_generated.type', 'trial'
} }
task gcsThirdPartyTest { tasks.register("gcsThirdPartyTest") {
dependsOn integTest dependsOn "integTest"
} }

View File

@ -77,6 +77,6 @@ testClusters.integTest {
} }
} }
task s3ThirdPartyTest { tasks.register("s3ThirdPartyTest") {
dependsOn integTest dependsOn "integTest"
} }
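A minimal sketch of an aggregate task like the third-party test wrappers above (names are illustrative): a registered task with no type and only dependencies costs nothing at configuration time until it is invoked.
tasks.register('thirdPartyTest') {
    group = 'verification'
    description = 'Aggregates the third-party integration tests.'
    dependsOn 'integTest' // assumes an integTest task exists in this project
}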

View File

@ -145,13 +145,13 @@ configurations {
testArtifacts.extendsFrom testRuntime testArtifacts.extendsFrom testRuntime
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { TaskProvider testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }
artifacts { artifacts {
// normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions
archives jar archives tasks.named('jar')
testArtifacts testJar testArtifacts testJar
} }

View File

@ -55,5 +55,5 @@ task integTestSecurity(type: RestTestRunnerTask) {
nonInputProperties.systemProperty 'tests.rest.cluster', "${-> testClusters.integTest.getAllHttpSocketURI().join(",")}" nonInputProperties.systemProperty 'tests.rest.cluster', "${-> testClusters.integTest.getAllHttpSocketURI().join(",")}"
} }
} }
check.dependsOn(integTestSecurity) tasks.named("check").configure { dependsOn(integTestSecurity) }

View File

@ -85,7 +85,7 @@ dependencies {
String grammarPath = 'src/main/antlr' String grammarPath = 'src/main/antlr'
String outputPath = 'src/main/java/org/elasticsearch/xpack/sql/parser' String outputPath = 'src/main/java/org/elasticsearch/xpack/sql/parser'
task cleanGenerated(type: Delete) { tasks.register("cleanGenerated", Delete) {
delete fileTree(grammarPath) { delete fileTree(grammarPath) {
include '*.tokens' include '*.tokens'
} }
@ -94,8 +94,8 @@ task cleanGenerated(type: Delete) {
} }
} }
task regenParser(type: JavaExec) { tasks.register("regenParser", JavaExec) {
dependsOn cleanGenerated dependsOn "cleanGenerated"
main = 'org.antlr.v4.Tool' main = 'org.antlr.v4.Tool'
classpath = configurations.regenerate classpath = configurations.regenerate
systemProperty 'file.encoding', 'UTF-8' systemProperty 'file.encoding', 'UTF-8'
@ -110,8 +110,8 @@ task regenParser(type: JavaExec) {
"${file(grammarPath)}/SqlBase.g4" "${file(grammarPath)}/SqlBase.g4"
} }
task regen { tasks.register("regen") {
dependsOn regenParser dependsOn "regenParser"
doLast { doLast {
// moves token files to grammar directory for use with IDE's // moves token files to grammar directory for use with IDE's
ant.move(file: "${outputPath}/SqlBase.tokens", toDir: grammarPath) ant.move(file: "${outputPath}/SqlBase.tokens", toDir: grammarPath)

View File

@ -33,15 +33,12 @@ dependencies {
testRuntimeOnly "org.elasticsearch:jna:${versions.jna}" testRuntimeOnly "org.elasticsearch:jna:${versions.jna}"
} }
/* disable unit tests because these are all integration tests used // this is just a test fixture used by other projects and not in production
* by other qa projects. */ ['test', 'dependencyLicenses', 'thirdPartyAudit', 'dependenciesInfo'].each {
test.enabled = false tasks.named(it).configure {
enabled = false
tasks.named("dependencyLicenses").configure { it.enabled = false } }
dependenciesInfo.enabled = false }
// just a test fixture: we aren't using these jars in releases and H2GIS requires disabling a lot of checks
thirdPartyAudit.enabled = false
subprojects { subprojects {
if (subprojects.isEmpty()) { if (subprojects.isEmpty()) {

View File

@ -54,9 +54,9 @@ tasks.named('forbiddenApisMain').configure {
signaturesFiles += files('src/forbidden/cli-signatures.txt') signaturesFiles += files('src/forbidden/cli-signatures.txt')
} }
task runcli { tasks.register("runcli") {
description = 'Run the CLI and connect to elasticsearch running on 9200' description = 'Run the CLI and connect to elasticsearch running on 9200'
dependsOn shadowJar dependsOn "shadowJar"
doLast { doLast {
List command = ["${BuildParams.runtimeJavaHome}/bin/java"] List command = ["${BuildParams.runtimeJavaHome}/bin/java"]
if ('true'.equals(System.getProperty('debug', 'false'))) { if ('true'.equals(System.getProperty('debug', 'false'))) {

View File

@ -14,7 +14,7 @@ File keystoreDir = new File(project.buildDir, 'keystore')
File nodeKey = file("$keystoreDir/testnode.pem") File nodeKey = file("$keystoreDir/testnode.pem")
File nodeCert = file("$keystoreDir/testnode.crt") File nodeCert = file("$keystoreDir/testnode.crt")
// Add key and certs to test classpath: it expects it there // Add key and certs to test classpath: it expects it there
task copyKeyCerts(type: Copy) { tasks.register("copyKeyCerts", Copy) {
from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) { from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) {
include 'testnode.crt', 'testnode.pem' include 'testnode.crt', 'testnode.pem'
} }
@ -22,9 +22,9 @@ task copyKeyCerts(type: Copy) {
} }
// Add keys and certs to test classpath: it expects it there // Add keys and certs to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir) sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(copyKeyCerts) tasks.named("processTestResources").configure { dependsOn("copyKeyCerts") }
integTest.dependsOn copyKeyCerts tasks.named("integTest").configure { dependsOn "copyKeyCerts" }
testClusters.integTest { testClusters.integTest {
testDistribution = 'DEFAULT' testDistribution = 'DEFAULT'

View File

@ -12,7 +12,7 @@ configurations {
testArtifacts.extendsFrom testImplementation testArtifacts.extendsFrom testImplementation
} }
task testJar(type: Jar) { def testJar = tasks.register("testJar", Jar) {
appendix 'test' appendix 'test'
from sourceSets.test.output from sourceSets.test.output
} }

View File

@ -15,11 +15,11 @@ dependencies {
testImplementation project(':x-pack:qa') testImplementation project(':x-pack:qa')
} }
licenseHeaders { tasks.named("licenseHeaders").configure {
approvedLicenses << 'Apache' approvedLicenses << 'Apache'
} }
forbiddenPatterns { tasks.named("forbiddenPatterns") {
exclude '**/system_key' exclude '**/system_key'
} }

View File

@ -41,7 +41,7 @@ testClusters.integTest {
user username: "test_kibana_user", password: "x-pack-test-password", role: "kibana_system" user username: "test_kibana_user", password: "x-pack-test-password", role: "kibana_system"
} }
task copyKeytabToGeneratedResources(type: Copy) { tasks.register("copyKeytabToGeneratedResources", Copy) {
from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab")
into "$buildDir/generated-resources/keytabs" into "$buildDir/generated-resources/keytabs"
dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture

View File

@ -55,9 +55,9 @@ testClusters.'mixed-cluster' {
user username: "test_user", password: "x-pack-test-password" user username: "test_user", password: "x-pack-test-password"
} }
task integTest { tasks.register("integTest") {
dependsOn 'mixed-cluster' dependsOn 'mixed-cluster'
} }
test.enabled = false // no unit tests for multi-cluster-search, only the rest integration test tasks.named("test").configure { enabled = false } // no unit tests for multi-cluster-search, only the rest integration test
check.dependsOn(integTest) tasks.named("check").configure { dependsOn("integTest") }

View File

@ -54,9 +54,9 @@ testClusters.'mixed-cluster' {
user username: "test_user", password: "x-pack-test-password" user username: "test_user", password: "x-pack-test-password"
} }
task integTest { tasks.register("integTest") {
dependsOn 'mixed-cluster' dependsOn 'mixed-cluster'
} }
test.enabled = false // no unit tests for multi-cluster-search, only the rest integration test tasks.named("test").configure { enabled = false } // no unit tests for multi-cluster-search, only the rest integration test
check.dependsOn(integTest) tasks.named("check").configure { dependsOn("integTest") }

View File

@ -14,7 +14,7 @@ testFixtures.useFixture ":x-pack:test:idp-fixture", "oidc-provider"
String ephemeralOpPort String ephemeralOpPort
String ephemeralProxyPort String ephemeralProxyPort
task setupPorts { tasks.register("setupPorts") {
// Don't attempt to get ephemeral ports when Docker is not available // Don't attempt to get ephemeral ports when Docker is not available
onlyIf { idpFixtureProject.postProcessFixture.state.skipped == false } onlyIf { idpFixtureProject.postProcessFixture.state.skipped == false }
dependsOn idpFixtureProject.postProcessFixture dependsOn idpFixtureProject.postProcessFixture
@ -25,7 +25,7 @@ task setupPorts {
} }
integTest.runner { integTest.runner {
dependsOn setupPorts dependsOn "setupPorts"
} }
testClusters.integTest { testClusters.integTest {

View File

@ -11,7 +11,7 @@ testFixtures.useFixture ":x-pack:test:idp-fixture", "openldap"
Project idpFixtureProject = xpackProject("test:idp-fixture") Project idpFixtureProject = xpackProject("test:idp-fixture")
String outputDir = "${project.buildDir}/generated-resources/${project.name}" String outputDir = "${project.buildDir}/generated-resources/${project.name}"
task copyIdpTrust(type: Copy) { def copyIdpTrust = tasks.register("copyIdpTrust", Copy) {
from idpFixtureProject.file('openldap/certs/ca.jks'); from idpFixtureProject.file('openldap/certs/ca.jks');
from idpFixtureProject.file('openldap/certs/ca_server.pem'); from idpFixtureProject.file('openldap/certs/ca_server.pem');
into outputDir into outputDir

View File

@ -25,7 +25,7 @@ forbiddenPatterns {
String outputDir = "${buildDir}/generated-resources/${project.name}" String outputDir = "${buildDir}/generated-resources/${project.name}"
task copyTestNodeKeyMaterial(type: Copy) { tasks.register("copyTestNodeKeyMaterial", Copy) {
from project(':x-pack:plugin:core').files('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem', from project(':x-pack:plugin:core').files('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem',
'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt') 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt')
into outputDir into outputDir
@ -92,7 +92,7 @@ for (Version bwcVersion : BuildParams.bwcVersions.wireCompatible) {
tasks.register("${baseName}#oldClusterTest", RestTestRunnerTask) { tasks.register("${baseName}#oldClusterTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}" useCluster testClusters."${baseName}"
mustRunAfter(precommit) mustRunAfter(precommit)
dependsOn copyTestNodeKeyMaterial dependsOn "copyTestNodeKeyMaterial"
systemProperty 'tests.rest.suite', 'old_cluster' systemProperty 'tests.rest.suite', 'old_cluster'
systemProperty 'tests.upgrade_from_version', oldVersion systemProperty 'tests.upgrade_from_version', oldVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}") nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")

View File

@ -13,13 +13,13 @@ testFixtures.useFixture ":x-pack:test:idp-fixture"
String outputDir = "${project.buildDir}/generated-resources/${project.name}" String outputDir = "${project.buildDir}/generated-resources/${project.name}"
task copyIdpFiles(type: Copy) { def copyIdpFiles = tasks.register("copyIdpFiles", Copy) {
from idpFixtureProject.files('idp/shibboleth-idp/credentials/idp-browser.pem', 'idp/shibboleth-idp/metadata/idp-metadata.xml'); from idpFixtureProject.files('idp/shibboleth-idp/credentials/idp-browser.pem', 'idp/shibboleth-idp/metadata/idp-metadata.xml');
into outputDir into outputDir
} }
project.sourceSets.test.output.dir(outputDir, builtBy: copyIdpFiles) project.sourceSets.test.output.dir(outputDir, builtBy: copyIdpFiles)
task setupPorts { tasks.register("setupPorts") {
dependsOn copyIdpFiles, idpFixtureProject.postProcessFixture dependsOn copyIdpFiles, idpFixtureProject.postProcessFixture
// Don't attempt to get ephemeral ports when Docker is not available // Don't attempt to get ephemeral ports when Docker is not available
onlyIf { idpFixtureProject.postProcessFixture.state.skipped == false } onlyIf { idpFixtureProject.postProcessFixture.state.skipped == false }
@ -39,7 +39,7 @@ task setupPorts {
} }
integTest.runner.dependsOn setupPorts integTest.runner.dependsOn "setupPorts"
testClusters.integTest { testClusters.integTest {
testDistribution = 'DEFAULT' testDistribution = 'DEFAULT'
@ -85,13 +85,13 @@ testClusters.integTest {
user username: "test_admin", password: 'x-pack-test-password' user username: "test_admin", password: 'x-pack-test-password'
} }
forbiddenPatterns { tasks.named("forbiddenPatterns").configure {
exclude '**/*.der' exclude '**/*.der'
exclude '**/*.p12' exclude '**/*.p12'
exclude '**/*.key' exclude '**/*.key'
} }
thirdPartyAudit { tasks.named("thirdPartyAudit").configure {
ignoreViolations( ignoreViolations(
// uses internal java api: sun.misc.Unsafe // uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64',

View File

@ -12,7 +12,7 @@ dependencies {
} }
String outputDir = "${buildDir}/generated-resources/${project.name}" String outputDir = "${buildDir}/generated-resources/${project.name}"
task copyXPackPluginProps(type: Copy) { def copyXPackPluginProps = tasks.register("copyXPackPluginProps", Copy) {
from project(xpackModule('core')).file('src/main/plugin-metadata') from project(xpackModule('core')).file('src/main/plugin-metadata')
from project(xpackModule('core')).tasks.pluginProperties from project(xpackModule('core')).tasks.pluginProperties
into outputDir into outputDir
@ -29,7 +29,7 @@ File clientKey = file("$keystoreDir/testclient.pem")
File clientCert = file("$keystoreDir/testclient.crt") File clientCert = file("$keystoreDir/testclient.crt")
// Add keystores to test classpath: it expects it there // Add keystores to test classpath: it expects it there
task copyKeyCerts(type: Copy) { def copyKeyCerts = tasks.register("copyKeyCerts", Copy) {
from('./') { from('./') {
include '*.crt', '*.pem', '*.jks' include '*.crt', '*.pem', '*.jks'
} }
@ -77,7 +77,7 @@ ext.expansions = [
'expected.plugins.count': pluginsCount 'expected.plugins.count': pluginsCount
] ]
processTestResources { tasks.named("processTestResources").configure {
from(sourceSets.test.resources.srcDirs) { from(sourceSets.test.resources.srcDirs) {
duplicatesStrategy = DuplicatesStrategy.INCLUDE duplicatesStrategy = DuplicatesStrategy.INCLUDE
include '**/*.yml' include '**/*.yml'

View File

@ -24,7 +24,7 @@ String jiraUser = System.getenv('jira_user')
String jiraPassword = System.getenv('jira_password') String jiraPassword = System.getenv('jira_password')
String jiraProject = System.getenv('jira_project') String jiraProject = System.getenv('jira_project')
task cleanJira(type: DefaultTask) { tasks.register("cleanJira", DefaultTask) {
doLast { doLast {
List<String> issues = jiraIssues(jiraProject) List<String> issues = jiraIssues(jiraProject)
assert issues instanceof List assert issues instanceof List
@ -54,7 +54,7 @@ if (!jiraUrl && !jiraUser && !jiraPassword && !jiraProject) {
keystore 'xpack.notification.jira.account.test.secure_user', jiraUser keystore 'xpack.notification.jira.account.test.secure_user', jiraUser
keystore 'xpack.notification.jira.account.test.secure_password', jiraPassword keystore 'xpack.notification.jira.account.test.secure_password', jiraPassword
} }
integTest.runner.finalizedBy cleanJira integTest.runner.finalizedBy "cleanJira"
} }
/** List all issues associated to a given Jira project **/ /** List all issues associated to a given Jira project **/