Add UBI docker builds

Backport of #60742.

This PR resurrects support for building Docker images based on one of
Red Hat's UBI images. It also adds support for running the existing
Docker tests against the image. The image is named
`elasticsearch-ubi8:<version>`.

I also changed the Docker build file to use enums instead of strings in a lot
of places, for added rigour.
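
For reviewers, a rough sketch of how the new image might be exercised locally. The Gradle task name is inferred from the `taskName()` helper in the Docker `build.gradle` changes below, and the `:test` tag from the new tag list, so treat both as assumptions rather than documented usage:

```sh
# Build the default-flavour UBI image for the current (x64) architecture.
# Task name assumed from taskName("build", X64, false, UBI, "DockerImage") -> buildUbiDockerImage.
./gradlew :distribution:docker:buildUbiDockerImage

# The build also tags the image as elasticsearch-ubi8:test, so a quick local
# smoke test could look like:
docker run --rm -p 9200:9200 -e "discovery.type=single-node" elasticsearch-ubi8:test
```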
Authored by Rory Hunter on 2020-08-18 09:27:23 +01:00, committed by Rory Hunter
parent 6ffa7f0737
commit d8aacbdd31
16 changed files with 294 additions and 108 deletions

View File

@ -0,0 +1,39 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle;
/**
* This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
*/
public enum DockerBase {
CENTOS("centos:8"),
// "latest" here is intentional, since the image name specifies "8"
UBI("registry.access.redhat.com/ubi8/ubi-minimal:latest");
private final String image;
DockerBase(String image) {
this.image = image;
}
public String getImage() {
return image;
}
}

View File

@ -50,7 +50,9 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
ARCHIVE,
RPM,
DEB,
DOCKER;
DOCKER,
// This is a different flavour of Docker image
DOCKER_UBI;
@Override
public String toString() {
@ -61,6 +63,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
switch (this) {
case DEB:
case DOCKER:
case DOCKER_UBI:
case RPM:
return false;
@ -188,6 +191,11 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
return bundledJdk.getOrElse(true);
}
public boolean isDocker() {
final Type type = this.type.get();
return type == Type.DOCKER || type == Type.DOCKER_UBI;
}
public void setBundledJdk(Boolean bundledJdk) {
this.bundledJdk.set(bundledJdk);
}
@ -230,9 +238,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
@Override
public TaskDependency getBuildDependencies() {
// For non-required Docker distributions, skip building the distribution if Docker is unavailable
if (getType() == Type.DOCKER
&& getFailIfUnavailable() == false
&& dockerSupport.get().getDockerAvailability().isAvailable == false) {
if (isDocker() && getFailIfUnavailable() == false && dockerSupport.get().getDockerAvailability().isAvailable == false) {
return task -> Collections.emptySet();
}
@ -266,7 +272,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
return;
}
if (getType() != Type.DOCKER && failIfUnavailable.get() == false) {
if (isDocker() == false && failIfUnavailable.get() == false) {
throw new IllegalArgumentException(
"failIfUnavailable cannot be 'false' on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
@ -283,10 +289,15 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
"platform cannot be set on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
}
if (getType() == Type.DOCKER && bundledJdk.isPresent()) {
throw new IllegalArgumentException(
"bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [docker]"
);
if (isDocker()) {
if (bundledJdk.isPresent()) {
throw new IllegalArgumentException(
"bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type " + "[docker]"
);
}
if (flavor.get() == Flavor.OSS && type.get() == Type.DOCKER_UBI) {
throw new IllegalArgumentException("Cannot build a UBI docker image for the OSS distribution");
}
}
}

View File

@ -102,6 +102,7 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
break;
case DOCKER:
case DOCKER_UBI:
projectPath += ":docker:";
projectPath += distributionProjectName(distribution);
break;
@ -151,6 +152,10 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
projectName += "docker" + archString + "-export";
break;
case DOCKER_UBI:
projectName += "ubi-docker" + archString + "-export";
break;
default:
projectName += distribution.getType();
break;

View File

@ -107,7 +107,7 @@ public class DistroTestPlugin implements Plugin<Project> {
depsTask.configure(t -> t.dependsOn(distribution, examplePlugin));
depsTasks.put(taskname, depsTask);
TaskProvider<Test> destructiveTask = configureTestTask(project, taskname, distribution, t -> {
t.onlyIf(t2 -> distribution.getType() != Type.DOCKER || dockerSupport.get().getDockerAvailability().isAvailable);
t.onlyIf(t2 -> distribution.isDocker() == false || dockerSupport.get().getDockerAvailability().isAvailable);
addDistributionSysprop(t, DISTRIBUTION_SYSPROP, distribution::toString);
addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
t.exclude("**/PackageUpgradeTests.class");
@ -191,7 +191,8 @@ public class DistroTestPlugin implements Plugin<Project> {
// auto-detection doesn't work.
//
// The shouldTestDocker property could be null, hence we use Boolean.TRUE.equals()
boolean shouldExecute = type != Type.DOCKER || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
boolean shouldExecute = (type != Type.DOCKER && type != Type.DOCKER_UBI)
|| Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
if (shouldExecute) {
distroTest.configure(t -> t.dependsOn(wrapperTask));
@ -218,6 +219,7 @@ public class DistroTestPlugin implements Plugin<Project> {
Map<ElasticsearchDistribution.Type, TaskProvider<?>> lifecyleTasks = new HashMap<>();
lifecyleTasks.put(Type.DOCKER, project.getTasks().register(taskPrefix + ".docker"));
lifecyleTasks.put(Type.DOCKER_UBI, project.getTasks().register(taskPrefix + ".ubi"));
lifecyleTasks.put(Type.ARCHIVE, project.getTasks().register(taskPrefix + ".archives"));
lifecyleTasks.put(Type.DEB, project.getTasks().register(taskPrefix + ".packages"));
lifecyleTasks.put(Type.RPM, lifecyleTasks.get(Type.DEB));
@ -344,26 +346,28 @@ public class DistroTestPlugin implements Plugin<Project> {
List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
for (Architecture architecture : Architecture.values()) {
for (Type type : Arrays.asList(Type.DEB, Type.RPM, Type.DOCKER)) {
for (Type type : Arrays.asList(Type.DEB, Type.RPM, Type.DOCKER, Type.DOCKER_UBI)) {
for (Flavor flavor : Flavor.values()) {
for (boolean bundledJdk : Arrays.asList(true, false)) {
// All our Docker images include a bundled JDK so it doesn't make sense to test without one.
// Also we'll never publish an ARM (aarch64) build without a bundled JDK.
boolean skip = bundledJdk == false && (type == Type.DOCKER || architecture == Architecture.AARCH64);
if (skip == false) {
currentDistros.add(
createDistro(
distributions,
architecture,
type,
null,
flavor,
bundledJdk,
VersionProperties.getElasticsearch()
)
);
if (bundledJdk == false) {
// We'll never publish an ARM (aarch64) build without a bundled JDK.
if (architecture == Architecture.AARCH64) {
continue;
}
// All our Docker images include a bundled JDK so it doesn't make sense to test without one.
if (type == Type.DOCKER || type == Type.DOCKER_UBI) {
continue;
}
}
// We don't publish the OSS distribution on UBI
if (type == Type.DOCKER_UBI && flavor == Flavor.OSS) {
continue;
}
currentDistros.add(
createDistro(distributions, architecture, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch())
);
}
}
}
@ -408,6 +412,7 @@ public class DistroTestPlugin implements Plugin<Project> {
String version
) {
String name = distroId(type, platform, flavor, bundledJdk, architecture) + "-" + version;
boolean isDocker = type == Type.DOCKER || type == Type.DOCKER_UBI;
ElasticsearchDistribution distro = distributions.create(name, d -> {
d.setArchitecture(architecture);
d.setFlavor(flavor);
@ -415,7 +420,7 @@ public class DistroTestPlugin implements Plugin<Project> {
if (type == Type.ARCHIVE) {
d.setPlatform(platform);
}
if (type != Type.DOCKER) {
if (isDocker == false) {
d.setBundledJdk(bundledJdk);
}
d.setVersion(version);
@ -423,7 +428,7 @@ public class DistroTestPlugin implements Plugin<Project> {
// Allow us to gracefully omit building Docker distributions if Docker is not available on the system.
// In such a case, since we can't build the Docker images, we'll simply skip the corresponding tests.
if (type == Type.DOCKER) {
if (isDocker) {
distro.setFailIfUnavailable(false);
}

View File

@ -1,4 +1,5 @@
import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.DockerBase
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
@ -28,18 +29,15 @@ dependencies {
ossDockerSource project(path: ":distribution:archives:oss-linux-tar")
}
ext.expansions = { architecture, oss, local ->
ext.expansions = { Architecture architecture, boolean oss, DockerBase base, boolean local ->
String classifier
if (local) {
switch (architecture) {
case "aarch64":
classifier = "linux-aarch64"
break
case "x64":
classifier = "linux-x86_64"
break
default:
throw new IllegalArgumentException("Unrecognized architecture [" + architecture + "], must be one of (aarch64|x64)")
if (architecture == Architecture.AARCH64) {
classifier = "linux-aarch64"
} else if (architecture == Architecture.X64) {
classifier = "linux-x86_64"
} else {
throw new IllegalArgumentException("Unsupported architecture [" + architecture + "]")
}
} else {
/* When sourcing the Elasticsearch build remotely, the same Dockerfile needs
@ -66,24 +64,35 @@ RUN curl --retry 8 -S -L \\
}
return [
'base_image' : base.getImage(),
'build_date' : BuildParams.buildDate,
'git_revision' : BuildParams.gitRevision,
'license' : oss ? 'Apache-2.0' : 'Elastic-License',
'package_manager' : base == DockerBase.UBI ? 'microdnf' : 'yum',
'source_elasticsearch': sourceElasticsearch,
'docker_base' : base.name().toLowerCase(),
'version' : VersionProperties.elasticsearch
]
}
private static String buildPath(final String architecture, final boolean oss) {
return "build/${"aarch64".equals(architecture) ? 'aarch64-' : ''}${oss ? 'oss-' : ''}docker"
private static String buildPath(Architecture architecture, boolean oss, DockerBase base) {
return 'build/' +
(architecture == Architecture.AARCH64 ? 'aarch64-' : '') +
(oss ? 'oss-' : '') +
(base == DockerBase.UBI ? 'ubi-' : '') +
'docker'
}
private static String taskName(final String prefix, final String architecture, final boolean oss, final String suffix) {
return "${prefix}${"aarch64".equals(architecture) ? 'Aarch64' : ''}${oss ? 'Oss' : ''}${suffix}"
private static String taskName(String prefix, Architecture architecture, boolean oss, DockerBase base, String suffix) {
return prefix +
(architecture == Architecture.AARCH64 ? 'Aarch64' : '') +
(oss ? 'Oss' : '') +
(base == DockerBase.UBI ? 'Ubi' : '') +
suffix
}
project.ext {
dockerBuildContext = { String architecture, boolean oss, boolean local ->
dockerBuildContext = { Architecture architecture, boolean oss, DockerBase base, boolean local ->
copySpec {
into('bin') {
from project.projectDir.toPath().resolve("src/docker/bin")
@ -91,33 +100,40 @@ project.ext {
into('config') {
/*
* Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration
* by creating config files in oss or default build-context sub-modules.
* The OSS and default distributions have different configurations, therefore we want to allow overriding the default configuration
* from files in the 'oss' sub-directory. We don't want the 'oss' sub-directory to appear in the final build context, however.
*/
duplicatesStrategy = DuplicatesStrategy.INCLUDE
from project.projectDir.toPath().resolve("src/docker/config")
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
from(project.projectDir.toPath().resolve("src/docker/config")) {
exclude 'oss'
}
if (oss) {
// Overlay the config file
from project.projectDir.toPath().resolve("src/docker/config/oss")
}
}
from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
expand(expansions(architecture, oss, local))
expand(expansions(architecture, oss, base, local))
}
}
}
}
void addCopyDockerContextTask(final String architecture, final boolean oss) {
tasks.register(taskName("copy", architecture, oss, "DockerContext"), Sync) {
expansions(architecture, oss, true).findAll { it.key != 'build_date' }.each { k, v ->
void addCopyDockerContextTask(Architecture architecture, boolean oss, DockerBase base) {
if (oss && base != DockerBase.CENTOS) {
throw new GradleException("The only allowed docker base image for OSS builds is CENTOS")
}
tasks.register(taskName("copy", architecture, oss, base, "DockerContext"), Sync) {
expansions(architecture, oss, base, true).findAll { it.key != 'build_date' }.each { k, v ->
inputs.property(k, { v.toString() })
}
into buildPath(architecture, oss)
into buildPath(architecture, oss, base)
with dockerBuildContext(architecture, oss, true)
with dockerBuildContext(architecture, oss, base, true)
if ("aarch64".equals(architecture)) {
if (architecture == Architecture.AARCH64) {
if (oss) {
from configurations.aarch64OssDockerSource
} else {
@ -202,25 +218,31 @@ tasks.named("check").configure {
dependsOn "integTest"
}
void addBuildDockerImage(final String architecture, final boolean oss) {
void addBuildDockerImage(Architecture architecture, boolean oss, DockerBase base) {
if (oss && base != DockerBase.CENTOS) {
throw new GradleException("The only allowed docker base image for OSS builds is CENTOS")
}
final TaskProvider<DockerBuildTask> buildDockerImageTask =
tasks.register(taskName("build", architecture, oss, "DockerImage"), DockerBuildTask) {
onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, oss, "DockerContext"))
tasks.register(taskName("build", architecture, oss, base, "DockerImage"), DockerBuildTask) {
onlyIf { Architecture.current() == architecture }
TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, oss, base, "DockerContext"))
dependsOn(copyContextTask)
dockerContext.fileProvider(copyContextTask.map { it.destinationDir })
String version = VersionProperties.elasticsearch
if (oss) {
tags = [
"docker.elastic.co/elasticsearch/elasticsearch-oss:${VersionProperties.elasticsearch}",
"docker.elastic.co/elasticsearch/elasticsearch-oss:${version}",
"elasticsearch-oss:test"
]
} else {
String suffix = base == DockerBase.UBI ? '-ubi8' : ''
tags = [
"elasticsearch:${VersionProperties.elasticsearch}",
"docker.elastic.co/elasticsearch/elasticsearch:${VersionProperties.elasticsearch}",
"docker.elastic.co/elasticsearch/elasticsearch-full:${VersionProperties.elasticsearch}",
"elasticsearch:test",
"elasticsearch${suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch${suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch-full${suffix}:${version}",
"elasticsearch${suffix}:test",
]
}
}
@ -229,10 +251,17 @@ void addBuildDockerImage(final String architecture, final boolean oss) {
}
}
for (final String architecture : ["aarch64", "x64"]) {
for (final boolean oss : [false, true]) {
addCopyDockerContextTask(architecture, oss)
addBuildDockerImage(architecture, oss)
for (final Architecture architecture : Architecture.values()) {
for (final DockerBase base : DockerBase.values()) {
for (final boolean oss : [false, true]) {
if (oss && base != DockerBase.CENTOS) {
// We only create Docker images for the OSS distribution on CentOS.
// Other bases only use the default distribution.
continue
}
addCopyDockerContextTask(architecture, oss, base)
addBuildDockerImage(architecture, oss, base)
}
}
}
@ -245,35 +274,41 @@ if (tasks.findByName("composePull")) {
/*
* The export subprojects write out the generated Docker images to disk, so
* that they can be easily reloaded, for example into a VM.
* that they can be easily reloaded, for example into a VM for distribution testing.
*/
subprojects { Project subProject ->
if (subProject.name.endsWith('-export')) {
apply plugin: 'distribution'
final String architecture = subProject.name.contains('aarch64-') ? 'aarch64' : 'x64'
final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64
final boolean oss = subProject.name.contains('oss-')
final DockerBase base = subProject.name.contains('ubi-') ? DockerBase.UBI : DockerBase.CENTOS
def exportTaskName = taskName("export", architecture, oss, "DockerImage")
def buildTaskName = taskName("build", architecture, oss, "DockerImage")
def tarFile = "${parent.projectDir}/build/elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"
final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
final String suffix = oss ? '-oss' : base == DockerBase.UBI ? '-ubi8' : ''
final String extension = base == DockerBase.UBI ? 'ubi.tar' : 'docker.tar'
final String artifactName = "elasticsearch${arch}${suffix}_test"
final TaskProvider<LoggedExec> exportDockerImageTask = tasks.register(exportTaskName, LoggedExec) {
final String exportTaskName = taskName("export", architecture, oss, base, "DockerImage")
final String buildTaskName = taskName("build", architecture, oss, base, "DockerImage")
final String tarFile = "${parent.projectDir}/build/${artifactName}_${VersionProperties.elasticsearch}.${extension}"
tasks.register(exportTaskName, LoggedExec) {
inputs.file("${parent.projectDir}/build/markers/${buildTaskName}.marker")
executable 'docker'
outputs.file(tarFile)
args "save",
"-o",
tarFile,
"elasticsearch${oss ? '-oss' : ''}:test"
"elasticsearch${suffix}:test"
dependsOn(parent.path + ":" + buildTaskName)
onlyIf { Architecture.current().name().toLowerCase().equals(architecture) }
onlyIf { Architecture.current() == architecture }
}
artifacts.add('default', file(tarFile)) {
type 'tar'
name "elasticsearch${"aarch64".equals(architecture) ? '-aarch64' : ''}${oss ? '-oss' : ''}"
name artifactName
builtBy exportTaskName
}

View File

@ -1,3 +1,5 @@
import org.elasticsearch.gradle.DockerBase
apply plugin: 'base'
tasks.register("buildDockerBuildContext", Tar) {
@ -6,8 +8,7 @@ tasks.register("buildDockerBuildContext", Tar) {
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch"
// Non-local builds don't need to specify an architecture.
// Make this explicit via the string value.
with dockerBuildContext("<remote>", false, false)
with dockerBuildContext(null, false, DockerBase.CENTOS, false)
}
tasks.named("assemble").configure {dependsOn "buildDockerBuildContext"}

View File

@ -1,3 +1,5 @@
import org.elasticsearch.gradle.DockerBase
apply plugin: 'base'
tasks.register("buildOssDockerBuildContext", Tar) {
@ -6,8 +8,7 @@ tasks.register("buildOssDockerBuildContext", Tar) {
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss"
// Non-local builds don't need to specify an architecture.
// Make this explicit via the string value.
with dockerBuildContext("<remote>", true, false)
with dockerBuildContext(null, true, DockerBase.CENTOS, false)
}
tasks.named("assemble").configure { dependsOn "buildOssDockerBuildContext" }

View File

@ -9,15 +9,21 @@
can also comment out blocks, like this one. See:
https://docs.groovy-lang.org/latest/html/api/groovy/text/SimpleTemplateEngine.html
We use control-flow tags in this file to conditionally render the content. The
layout/presentation here has been adjusted so that it looks reasonable when rendered,
at the slight expense of how it looks here.
*/ %>
################################################################################
# Build stage 0 `builder`:
# Extract elasticsearch artifact
# Set gid=0 and make group perms==owner perms
# Extract Elasticsearch artifact
################################################################################
FROM centos:8 AS builder
FROM ${base_image} AS builder
<% if (docker_base == 'ubi') { %>
# Install required packages to extract the Elasticsearch distribution
RUN ${package_manager} install -y tar gzip
<% } %>
# `tini` is a tiny but valid init for containers. This is used to cleanly
# control how ES and any child processes are shut down.
#
@ -25,7 +31,6 @@ FROM centos:8 AS builder
# gpg, but the keyservers are slow to return the key and this can fail the
# build. Instead, we check the binary against the published checksum.
RUN set -eux ; \\
\\
tini_bin="" ; \\
case "\$(arch)" in \\
aarch64) tini_bin='tini-arm64' ;; \\
@ -39,11 +44,7 @@ RUN set -eux ; \\
mv \${tini_bin} /tini ; \\
chmod +x /tini
ENV PATH /usr/share/elasticsearch/bin:\$PATH
RUN groupadd -g 1000 elasticsearch && \\
adduser -u 1000 -g 1000 -d /usr/share/elasticsearch elasticsearch
RUN mkdir /usr/share/elasticsearch
WORKDIR /usr/share/elasticsearch
${source_elasticsearch}
@ -56,29 +57,33 @@ COPY config/elasticsearch.yml config/log4j2.properties config/
RUN chmod 0660 config/elasticsearch.yml config/log4j2.properties
################################################################################
# Build stage 1 (the actual elasticsearch image):
# Build stage 1 (the actual Elasticsearch image):
#
# Copy elasticsearch from stage 0
# Add entrypoint
################################################################################
FROM centos:8
FROM ${base_image}
ENV ELASTIC_CONTAINER true
COPY --from=builder /tini /tini
RUN for iter in {1..10}; do yum update --setopt=tsflags=nodocs -y && \\
yum install --setopt=tsflags=nodocs -y nc shadow-utils zip unzip && \\
yum clean all && exit_code=0 && break || exit_code=\$? && echo "yum error: retry \$iter in 10s" && sleep 10; done; \\
RUN for iter in {1..10}; do \\
${package_manager} update --setopt=tsflags=nodocs -y && \\
${package_manager} install --setopt=tsflags=nodocs -y \\
nc shadow-utils zip unzip <%= docker_base == 'ubi' ? 'findutils procps-ng' : '' %> && \\
${package_manager} clean all && exit_code=0 && break || exit_code=\$? && echo "${package_manager} error: retry \$iter in 10s" && \\
sleep 10; \\
done; \\
(exit \$exit_code)
RUN groupadd -g 1000 elasticsearch && \\
adduser -u 1000 -g 1000 -G 0 -d /usr/share/elasticsearch elasticsearch && \\
chmod 0775 /usr/share/elasticsearch && \\
chgrp 0 /usr/share/elasticsearch
chown -R 1000:0 /usr/share/elasticsearch
WORKDIR /usr/share/elasticsearch
COPY --from=builder --chown=1000:0 /usr/share/elasticsearch /usr/share/elasticsearch
COPY --from=builder --chown=0:0 /tini /tini
# Replace OpenJDK's built-in CA certificate keystore with the one from the OS
# vendor. The latter is superior in several ways.
@ -116,6 +121,18 @@ LABEL org.label-schema.build-date="${build_date}" \\
org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\
org.opencontainers.image.vendor="Elastic" \\
org.opencontainers.image.version="${version}"
<% if (docker_base == 'ubi') { %>
LABEL name="Elasticsearch" \\
maintainer="infra@elastic.co" \\
vendor="Elastic" \\
version="${version}" \\
release="1" \\
summary="Elasticsearch" \\
description="You know, for search."
RUN mkdir /licenses && \\
cp LICENSE.txt /licenses/LICENSE
<% } %>
ENTRYPOINT ["/tini", "--", "/usr/local/bin/docker-entrypoint.sh"]
# Dummy overridable parameter parsed by entrypoint

View File

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

View File

@ -0,0 +1,13 @@
import org.elasticsearch.gradle.DockerBase
apply plugin: 'base'
task buildUbiDockerBuildContext(type: Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-ubi8"
with dockerBuildContext(null, false, DockerBase.UBI, false)
}
assemble.dependsOn buildUbiDockerBuildContext

View File

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

View File

@ -21,6 +21,7 @@ package org.elasticsearch.packaging.test;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.Platforms;
import org.elasticsearch.packaging.util.ServerUtils;
@ -57,6 +58,7 @@ import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.elasticsearch.packaging.util.FileMatcher.p775;
import static org.elasticsearch.packaging.util.FileUtils.append;
import static org.elasticsearch.packaging.util.FileUtils.rm;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyString;
@ -631,4 +633,46 @@ public class DockerTests extends PackagingTestCase {
assertThat("Failed to find [cpu] in node OS cgroup stats", cgroupStats.get("cpu"), not(nullValue()));
assertThat("Failed to find [cpuacct] in node OS cgroup stats", cgroupStats.get("cpuacct"), not(nullValue()));
}
/**
* Check that the UBI image has the correct license information in the correct place.
*/
public void test200UbiImagesHaveLicenseDirectory() {
assumeTrue(distribution.packaging == Distribution.Packaging.DOCKER_UBI);
final String[] files = sh.run("find /licenses -type f").stdout.split("\n");
assertThat(files, arrayContaining("/licenses/LICENSE"));
// The UBI image doesn't contain `diff`, so compare the file contents directly
final String ubiLicense = sh.run("cat /licenses/LICENSE").stdout;
final String distroLicense = sh.run("cat /usr/share/elasticsearch/LICENSE.txt").stdout;
assertThat(ubiLicense, equalTo(distroLicense));
}
/**
* Check that the UBI image has the expected labels
*/
public void test210UbiLabels() throws Exception {
assumeTrue(distribution.packaging == Distribution.Packaging.DOCKER_UBI);
final Map<String, String> labels = getImageLabels(distribution);
final Map<String, String> staticLabels = new HashMap<>();
staticLabels.put("name", "Elasticsearch");
staticLabels.put("maintainer", "infra@elastic.co");
staticLabels.put("vendor", "Elastic");
staticLabels.put("summary", "Elasticsearch");
staticLabels.put("description", "You know, for search.");
final Set<String> dynamicLabels = new HashSet<>();
dynamicLabels.add("release");
dynamicLabels.add("version");
staticLabels.forEach((key, value) -> {
assertThat(labels, hasKey(key));
assertThat(labels.get(key), equalTo(value));
});
dynamicLabels.forEach(key -> assertThat(labels, hasKey(key)));
}
}

View File

@ -39,6 +39,8 @@ public class Distribution {
this.packaging = Packaging.TAR;
} else if (filename.endsWith(".docker.tar")) {
this.packaging = Packaging.DOCKER;
} else if (filename.endsWith(".ubi.tar")) {
this.packaging = Packaging.DOCKER_UBI;
} else {
int lastDot = filename.lastIndexOf('.');
this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT));
@ -71,7 +73,7 @@ public class Distribution {
}
public boolean isDocker() {
return packaging == Packaging.DOCKER;
return packaging == Packaging.DOCKER || packaging == Packaging.DOCKER_UBI;
}
public enum Packaging {
@ -80,7 +82,8 @@ public class Distribution {
ZIP(".zip", Platforms.WINDOWS),
DEB(".deb", Platforms.isDPKG()),
RPM(".rpm", Platforms.isRPM()),
DOCKER(".docker.tar", Platforms.isDocker());
DOCKER(".docker.tar", Platforms.isDocker()),
DOCKER_UBI(".ubi.tar", Platforms.isDocker());
/** The extension of this distribution's file */
public final String extension;

View File

@ -77,7 +77,7 @@ public class Docker {
* @param distribution details about the docker image to potentially load.
*/
public static void ensureImageIsLoaded(Distribution distribution) {
Shell.Result result = sh.run("docker image ls --format '{{.Repository}}' " + distribution.flavor.name);
Shell.Result result = sh.run("docker image ls --format '{{.Repository}}' " + getImageName(distribution));
final long count = Arrays.stream(result.stdout.split("\n")).map(String::trim).filter(s -> s.isEmpty() == false).count();
@ -160,7 +160,8 @@ public class Docker {
volumes.forEach((localPath, containerPath) -> args.add("--volume \"" + localPath + ":" + containerPath + "\""));
}
args.add(distribution.flavor.name + ":test");
// Image name
args.add(getImageName(distribution));
final String command = String.join(" ", args);
logger.info("Running command: " + command);
@ -525,7 +526,7 @@ public class Docker {
public static Map<String, String> getImageLabels(Distribution distribution) throws Exception {
// The format below extracts the .Config.Labels value, and prints it as json. Without the json
// modifier, a stringified Go map is printed instead, which isn't helpful.
String labelsJson = sh.run("docker inspect -f '{{json .Config.Labels}}' " + distribution.flavor.name + ":test").stdout;
String labelsJson = sh.run("docker inspect -f '{{json .Config.Labels}}' " + getImageName(distribution)).stdout;
ObjectMapper mapper = new ObjectMapper();
@ -541,4 +542,8 @@ public class Docker {
public static Shell.Result getContainerLogs() {
return sh.run("docker logs " + containerId);
}
private static String getImageName(Distribution distribution) {
return distribution.flavor.name + (distribution.packaging == Distribution.Packaging.DOCKER_UBI ? "-ubi8" : "") + ":test";
}
}

View File

@ -72,7 +72,7 @@ public class ServerUtils {
boolean xpackEnabled = false;
// TODO: need a way to check if docker has security enabled, the yml config is not bind mounted so can't look from here
if (installation.distribution.packaging != Distribution.Packaging.DOCKER) {
if (installation.distribution.isDocker() == false) {
Path configFilePath = installation.config("elasticsearch.yml");
// this is fragile, but currently doesn't deviate from a single line enablement and not worth the parsing effort
try (Stream<String> lines = Files.lines(configFilePath, StandardCharsets.UTF_8)) {

View File

@ -36,12 +36,15 @@ List projects = [
'distribution:docker',
'distribution:docker:docker-aarch64-build-context',
'distribution:docker:docker-aarch64-export',
'distribution:docker:oss-docker-aarch64-build-context',
'distribution:docker:oss-docker-aarch64-export',
'distribution:docker:docker-build-context',
'distribution:docker:docker-export',
'distribution:docker:oss-docker-aarch64-build-context',
'distribution:docker:oss-docker-aarch64-export',
'distribution:docker:oss-docker-build-context',
'distribution:docker:oss-docker-export',
'distribution:docker:ubi-docker-aarch64-export',
'distribution:docker:ubi-docker-build-context',
'distribution:docker:ubi-docker-export',
'distribution:packages:aarch64-oss-deb',
'distribution:packages:oss-deb',
'distribution:packages:aarch64-deb',