// OpenSearch/distribution/docker/build.gradle

import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.DockerBase
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.docker.DockerBuildTask
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.internal-distribution-download'
apply plugin: 'elasticsearch.rest-resources'
testFixtures.useFixture()
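// The *OssDockerSource configurations resolve the locally built OSS tarballs that are
// copied into the Docker build context for local (non-remote) image builds.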
configurations {
  aarch64OssDockerSource
  ossDockerSource
}
dependencies {
  aarch64OssDockerSource project(path: ":distribution:archives:oss-linux-aarch64-tar", configuration:"default")
  ossDockerSource project(path: ":distribution:archives:oss-linux-tar", configuration:"default")
}
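// expansions() produces the token -> value map used to expand the Dockerfile template.
// When `local` is true the tarball is COPY'd from the build context; otherwise the
// generated Dockerfile downloads the artifact at image-build time.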
ext.expansions = { Architecture architecture, DockerBase base, boolean local ->
  String classifier
  if (local) {
    if (architecture == Architecture.AARCH64) {
      classifier = "linux-aarch64"
    } else if (architecture == Architecture.X64) {
      classifier = "linux-x86_64"
    } else {
      throw new IllegalArgumentException("Unsupported architecture [" + architecture + "]")
    }
  } else {
    /* When sourcing the Elasticsearch build remotely, the same Dockerfile needs
     * to be able to fetch the artifact for any supported platform. We can't make
     * the decision here. Bash will interpolate the `arch` command for us. */
    classifier = "linux-\$(arch)"
  }

  final String elasticsearch = "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz"

  /* Both the following Dockerfile commands put the resulting artifact at
   * the same location, regardless of classifier, so that the commands that
   * follow in the Dockerfile don't have to know about the runtime
   * architecture. */
  String sourceElasticsearch
  if (local) {
    sourceElasticsearch = "COPY $elasticsearch /opt/elasticsearch.tar.gz"
  } else {
    sourceElasticsearch = """
RUN curl --retry 8 -S -L \\
      --output /opt/elasticsearch.tar.gz \\
      https://artifacts-no-kpi.elastic.co/downloads/elasticsearch/$elasticsearch
"""
  }

  return [
    'base_image'          : base.getImage(),
    'build_date'          : BuildParams.buildDate,
    'git_revision'        : BuildParams.gitRevision,
    'license'             : 'Apache-2.0',
    'package_manager'     : 'yum',
    'source_elasticsearch': sourceElasticsearch,
    'docker_base'         : base.name().toLowerCase(),
    'version'             : VersionProperties.elasticsearch
  ]
}
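// Helpers for deriving per-architecture build directories and task names. The `base`
// parameter is accepted for symmetry but unused, since only the OSS/CentOS combination
// is built here.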
private static String buildPath(Architecture architecture, DockerBase base) {
  return 'build/' +
    (architecture == Architecture.AARCH64 ? 'aarch64-' : '') +
    'oss-' +
    'docker'
}
private static String taskName(String prefix, Architecture architecture, DockerBase base, String suffix) {
  return prefix +
    (architecture == Architecture.AARCH64 ? 'Aarch64' : '') +
    'Oss' +
    suffix
}
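// For example:
//   taskName("copy", Architecture.X64, DockerBase.CENTOS, "DockerContext") == "copyOssDockerContext"
//   taskName("build", Architecture.AARCH64, DockerBase.CENTOS, "DockerImage") == "buildAarch64OssDockerImage"

// dockerBuildContext assembles the Docker build context: the bin/ and config/ resources
// plus the Dockerfile, expanded with the values produced by expansions() above.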
project.ext {
  dockerBuildContext = { Architecture architecture, DockerBase base, boolean local ->
    copySpec {
      into('bin') {
        from project.projectDir.toPath().resolve("src/docker/bin")
      }
      into('config') {
        from project.projectDir.toPath().resolve("src/docker/config")
      }
      from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
        expand(expansions(architecture, base, local))
      }
    }
  }
}
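// Registers a Sync task that stages the Docker build context on disk. Each expansion
// value is declared as a task input (except build_date, presumably so a changing
// timestamp does not defeat up-to-date checks), and the matching tarball configuration
// is copied in per architecture.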
void addCopyDockerContextTask(Architecture architecture, DockerBase base) {
  if (base != DockerBase.CENTOS) {
    throw new GradleException("The only allowed docker base image for OSS builds is CENTOS")
  }

  tasks.register(taskName("copy", architecture, base, "DockerContext"), Sync) {
    expansions(architecture, base, true).findAll { it.key != 'build_date' }.each { k, v ->
      inputs.property(k, { v.toString() })
    }
    into buildPath(architecture, base)
    with dockerBuildContext(architecture, base, true)

    if (architecture == Architecture.AARCH64) {
      from configurations.aarch64OssDockerSource
    } else {
      from configurations.ossDockerSource
    }
  }
}
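// Creates the given directories and marks them writable for all users (not just the
// owner), so that processes inside the fixture containers can write to them.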
def createAndSetWritable(Object... locations) {
  locations.each { location ->
    File file = file(location)
    file.mkdirs()
    file.setWritable(true, false)
  }
}
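// Registers docker-type distributions, one per architecture; failIfUnavailable is
// disabled so the build does not fail on hosts where Docker is unavailable.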
elasticsearch_distributions {
  Architecture.values().each { eachArchitecture ->
    "docker${ eachArchitecture == Architecture.AARCH64 ? '_aarch64' : '' }" {
      architecture = eachArchitecture
      type = 'docker'
      version = VersionProperties.getElasticsearch()
      failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
    }
  }
}
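// Runs before the docker-compose test fixture starts: depends on the docker distribution
// so the image is available, wipes the repo directory (tests expect it to be empty), and
// recreates the writable repo and log directories the fixture uses.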
tasks.named("preProcessFixture").configure {
  dependsOn elasticsearch_distributions.docker
  doLast {
    // tests expect to have an empty repo
    project.delete(
      "${buildDir}/oss-repo"
    )
    createAndSetWritable(
      "${buildDir}/oss-repo",
      "${buildDir}/logs/oss-1",
      "${buildDir}/logs/oss-2"
    )
  }
}
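// The Docker integration tests are deliberately excluded from the build cache and run in
// a single fork; they pick up any test class whose name ends in IT.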
tasks.register("integTest", Test) {
  outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
  maxParallelForks = '1'
  include '**/*IT.class'
}
tasks.named("check").configure {
  dependsOn "integTest"
}
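// Registers a DockerBuildTask for the given architecture/base. The task only runs when
// the host architecture matches, consumes the staged build context from the copy task,
// tags the image for both the release registry and local testing, and is wired into
// assemble.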
void addBuildDockerImage(Architecture architecture, DockerBase base) {
  if (base != DockerBase.CENTOS) {
    throw new GradleException("The only allowed docker base image for OSS builds is CENTOS")
  }

  final TaskProvider<DockerBuildTask> buildDockerImageTask =
    tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) {
      onlyIf { Architecture.current() == architecture }
      TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, base, "DockerContext"))
      dependsOn(copyContextTask)
      dockerContext.fileProvider(copyContextTask.map { it.destinationDir })
      baseImages = [ base.getImage() ]

      String version = VersionProperties.elasticsearch
      tags = [
        "docker.elastic.co/elasticsearch/elasticsearch-oss:${version}",
        "elasticsearch-oss:test"
      ]
    }

  tasks.named("assemble").configure {
    dependsOn(buildDockerImageTask)
  }
}
for (final Architecture architecture : Architecture.values()) {
  // We only create Docker images for the OSS distribution on CentOS.
  for (final DockerBase base : DockerBase.values()) {
    if (base == DockerBase.CENTOS) {
      addCopyDockerContextTask(architecture, base)
      addBuildDockerImage(architecture, base)
    }
  }
}
// We build the images used in compose locally, but the pull command insists on using a
// repository, so we disable it here to prevent that.
// Everything will still be available, since the local images are built when a pull is requested.
if (tasks.findByName("composePull")) {
  tasks.composePull.enabled = false
}
/*
 * The export subprojects write out the generated Docker images to disk, so
 * that they can be easily reloaded, for example into a VM for distribution testing
 */
subprojects { Project subProject ->
  if (subProject.name.endsWith('-export')) {
    apply plugin: 'distribution'

    final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64
    final DockerBase base = DockerBase.CENTOS
    final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
    final String suffix = '-oss'
    final String extension = 'docker.tar'
    final String artifactName = "elasticsearch${arch}${suffix}_test"

    final String exportTaskName = taskName("export", architecture, base, "DockerImage")
    final String buildTaskName = taskName("build", architecture, base, "DockerImage")
    final String tarFile = "${parent.projectDir}/build/${artifactName}_${VersionProperties.elasticsearch}.${extension}"
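    // For an x86_64 export this resolves to something like (sketch; the actual version
    // comes from VersionProperties.elasticsearch):
    //   ${parent.projectDir}/build/elasticsearch-oss_test_<version>.docker.tar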
    tasks.register(exportTaskName, LoggedExec) {
      inputs.file("${parent.projectDir}/build/markers/${buildTaskName}.marker")
      executable 'docker'
      outputs.file(tarFile)
      args "save",
        "-o",
        tarFile,
        "elasticsearch${suffix}:test"

      dependsOn(parent.path + ":" + buildTaskName)
      onlyIf { Architecture.current() == architecture }
    }

    artifacts.add('default', file(tarFile)) {
      type 'tar'
      name artifactName
      builtBy exportTaskName
    }

    tasks.named("assemble").configure {
      dependsOn(exportTaskName)
    }
  }
}
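// Typical entry points (a sketch; assumes this script is the :distribution:docker project
// and the standard Gradle wrapper is used):
//   ./gradlew :distribution:docker:buildOssDockerImage           (x86_64 image)
//   ./gradlew :distribution:docker:buildAarch64OssDockerImage    (aarch64 image)
//   ./gradlew :distribution:docker:integTest                     (Docker integration tests)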