OpenSearch/distribution/docker/build.gradle

import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.docker.DockerBuildTask
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.distribution-download'
testFixtures.useFixture()
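// Configurations through which the distribution tarballs (and any plugins) are wired into the
// Docker build contexts, one per architecture and flavor.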
configurations {
  dockerPlugins
  aarch64DockerSource
  dockerSource
  aarch64OssDockerSource
  ossDockerSource
}
dependencies {
  aarch64DockerSource project(path: ":distribution:archives:linux-aarch64-tar")
  dockerSource project(path: ":distribution:archives:linux-tar")
  aarch64OssDockerSource project(path: ":distribution:archives:oss-linux-aarch64-tar")
  ossDockerSource project(path: ":distribution:archives:oss-linux-tar")
}
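// Closure that computes the substitutions applied to the Dockerfile template for a given
// architecture, flavor (oss or default) and artifact source (a locally built tarball vs. a
// remote download from artifacts.elastic.co).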
ext.expansions = { architecture, oss, local ->
  String classifier
  if (local) {
    switch (architecture) {
      case "aarch64":
        classifier = "linux-aarch64"
        break
      case "x64":
        classifier = "linux-x86_64"
        break
      default:
        throw new IllegalArgumentException("Unrecognized architecture [" + architecture + "], must be one of (aarch64|x64)")
    }
  } else {
    /* When sourcing the Elasticsearch build remotely, the same Dockerfile needs
     * to be able to fetch the artifact for any supported platform. We can't make
     * the decision here. Bash will interpolate the `arch` command for us. */
    classifier = "linux-\$(arch)"
  }

  final String elasticsearch = "elasticsearch-${oss ? 'oss-' : ''}${VersionProperties.elasticsearch}-${classifier}.tar.gz"

  /* Both the following Dockerfile commands put the resulting artifact at
   * the same location, regardless of classifier, so that the commands that
   * follow in the Dockerfile don't have to know about the runtime
   * architecture. */
  String sourceElasticsearch
  if (local) {
    sourceElasticsearch = "COPY $elasticsearch /opt/elasticsearch.tar.gz"
  } else {
    sourceElasticsearch = """
RUN curl --retry 8 -S -L \\
      --output /opt/elasticsearch.tar.gz \\
      https://artifacts.elastic.co/downloads/elasticsearch/$elasticsearch
"""
  }

  return [
    'build_date'          : BuildParams.buildDate,
    'git_revision'        : BuildParams.gitRevision,
    'license'             : oss ? 'Apache-2.0' : 'Elastic-License',
    'source_elasticsearch': sourceElasticsearch,
    'version'             : VersionProperties.elasticsearch
  ]
}
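// Naming helpers: buildPath("aarch64", true) resolves to "build/aarch64-oss-docker" and
// taskName("build", "aarch64", true, "DockerImage") to "buildAarch64OssDockerImage".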
private static String buildPath(final String architecture, final boolean oss) {
  return "build/${"aarch64".equals(architecture) ? 'aarch64-' : ''}${oss ? 'oss-' : ''}docker"
}

private static String taskName(final String prefix, final String architecture, final boolean oss, final String suffix) {
  return "${prefix}${"aarch64".equals(architecture) ? 'Aarch64' : ''}${oss ? 'Oss' : ''}${suffix}"
}
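// Assembles the Docker build context: the bin/ scripts, the config files (with oss overrides
// when applicable) and the Dockerfile expanded with the substitutions computed above.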
project.ext {
  dockerBuildContext = { String architecture, boolean oss, boolean local ->
    copySpec {
      into('bin') {
        from project.projectDir.toPath().resolve("src/docker/bin")
      }
      into('config') {
        /*
         * The oss and default distributions can have different configuration, so we allow the
         * default configuration to be overridden by placing config files in the oss or default
         * build-context sub-modules.
         */
        from project.projectDir.toPath().resolve("src/docker/config")
        if (oss) {
          from project.projectDir.toPath().resolve("src/docker/config/oss")
        }
      }
      from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
        expand(expansions(architecture, oss, local))
      }
    }
  }
}
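// Registers a Sync task that stages the build context together with the matching distribution
// tarball and plugins for the given architecture/flavor combination.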
void addCopyDockerContextTask(final String architecture, final boolean oss) {
  task(taskName("copy", architecture, oss, "DockerContext"), type: Sync) {
    expansions(architecture, oss, true).findAll { it.key != 'build_date' }.each { k, v ->
      inputs.property(k, { v.toString() })
    }
    into buildPath(architecture, oss)
    with dockerBuildContext(architecture, oss, true)
    if ("aarch64".equals(architecture)) {
      if (oss) {
        from configurations.aarch64OssDockerSource
      } else {
        from configurations.aarch64DockerSource
      }
    } else {
      if (oss) {
        from configurations.ossDockerSource
      } else {
        from configurations.dockerSource
      }
    }
    from configurations.dockerPlugins
  }
}
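// Creates the given directories and makes them world-writable, so the containers started by the
// test fixture can write into them.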
def createAndSetWritable(Object... locations) {
  locations.each { location ->
    File file = file(location)
    file.mkdirs()
    file.setWritable(true, false)
  }
}
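// Stages the test SSL keystore from x-pack core under build/certs and makes it world-readable
// for the containers.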
task copyKeystore(type: Sync) {
  from project(':x-pack:plugin:core')
    .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
  into "${buildDir}/certs"
  doLast {
    file("${buildDir}/certs").setReadable(true, false)
    file("${buildDir}/certs/testnode.jks").setReadable(true, false)
  }
}
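// Declares a Docker distribution for every architecture/flavor combination; the test fixture
// below depends on the default and oss images.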
elasticsearch_distributions {
  Architecture.values().each { eachArchitecture ->
    Flavor.values().each { distroFlavor ->
      "docker_$distroFlavor${ eachArchitecture == Architecture.AARCH64 ? '_aarch64' : '' }" {
        architecture = eachArchitecture
        flavor = distroFlavor
        type = 'docker'
        version = VersionProperties.getElasticsearch()
        failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
      }
    }
  }
}
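// Before docker-compose starts, make sure the required images are built, the keystore is copied,
// and the repo/log directories used by the fixture are reset.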
preProcessFixture {
  dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss
  dependsOn copyKeystore
  doLast {
    // tests expect to have an empty repo
    project.delete(
      "${buildDir}/repo",
      "${buildDir}/oss-repo"
    )
    createAndSetWritable(
      "${buildDir}/repo",
      "${buildDir}/oss-repo",
      "${buildDir}/logs/default-1",
      "${buildDir}/logs/default-2",
      "${buildDir}/logs/oss-1",
      "${buildDir}/logs/oss-2"
    )
  }
}
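// Make the test keystore available on the test classpath as well.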
processTestResources {
  from project(':x-pack:plugin:core')
    .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
}
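// The Docker integration tests (classes matching *IT) run against the compose fixture; build
// caching is disabled and the tests run in a single fork.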
task integTest(type: Test) {
  outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
  maxParallelForks = '1'
  include '**/*IT.class'
}

check.dependsOn integTest
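// Registers a DockerBuildTask that builds and tags the image from the staged context; the task
// only runs when the host architecture matches the target architecture.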
void addBuildDockerImage(final String architecture, final boolean oss) {
  final Task buildDockerImageTask = task(taskName("build", architecture, oss, "DockerImage"), type: DockerBuildTask) {
    TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, oss, "DockerContext"))
    dependsOn(copyContextTask)
    dockerContext.fileProvider(copyContextTask.map { it.destinationDir })

    if (oss) {
      tags = [
        "docker.elastic.co/elasticsearch/elasticsearch-oss:${VersionProperties.elasticsearch}",
        "elasticsearch-oss:test"
      ]
    } else {
      tags = [
        "elasticsearch:${VersionProperties.elasticsearch}",
        "docker.elastic.co/elasticsearch/elasticsearch:${VersionProperties.elasticsearch}",
        "docker.elastic.co/elasticsearch/elasticsearch-full:${VersionProperties.elasticsearch}",
        "elasticsearch:test",
      ]
    }
  }
  buildDockerImageTask.onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
  assemble.dependsOn(buildDockerImageTask)
}
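// Create the context-copy and image-build tasks for every architecture/flavor combination.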
for (final String architecture : ["aarch64", "x64"]) {
  for (final boolean oss : [false, true]) {
    addCopyDockerContextTask(architecture, oss)
    addBuildDockerImage(architecture, oss)
  }
}
// We build the images used in compose locally, but the pull command insists on using a repository,
// so we must disable it to prevent it from doing so. Everything will still be pulled, since we
// build the local images on a pull.
if (tasks.findByName("composePull")) {
  tasks.composePull.enabled = false
}
/*
 * The export subprojects write out the generated Docker images to disk, so
 * that they can be easily reloaded, for example into a VM.
 */
subprojects { Project subProject ->
  if (subProject.name.endsWith('-export')) {
    apply plugin: 'distribution'

    final String architecture = subProject.name.contains('aarch64-') ? 'aarch64' : 'x64'
    final boolean oss = subProject.name.contains('oss-')

    def exportTaskName = taskName("export", architecture, oss, "DockerImage")
    def buildTaskName = taskName("build", architecture, oss, "DockerImage")
    def tarFile = "${parent.projectDir}/build/elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"

    final Task exportDockerImageTask = task(exportTaskName, type: LoggedExec) {
      inputs.file("${parent.projectDir}/build/markers/${buildTaskName}.marker")
      executable 'docker'
      outputs.file(tarFile)
      args "save",
        "-o",
        tarFile,
        "elasticsearch${oss ? '-oss' : ''}:test"
    }

    exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName))
    exportDockerImageTask.onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }

    artifacts.add('default', file(tarFile)) {
      type 'tar'
      name "elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}"
      builtBy exportTaskName
    }

    assemble.dependsOn exportTaskName
  }
}