[Backport] Consolidate docker availability logic (#52656)

This commit is contained in:
Mark Vieira 2020-02-21 15:24:05 -08:00 committed by GitHub
parent 8abfda0b59
commit f06d692706
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 679 additions and 553 deletions

View File

@ -34,6 +34,7 @@ import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure
plugins {
id 'lifecycle-base'
id 'elasticsearch.docker-support'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
}

View File

@ -84,8 +84,6 @@ import java.nio.charset.StandardCharsets
import java.nio.file.Files
import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure
import static org.elasticsearch.gradle.tool.DockerUtils.assertDockerIsAvailable
import static org.elasticsearch.gradle.tool.DockerUtils.getDockerPath
/**
* Encapsulates build configuration for elasticsearch projects.
@ -208,51 +206,6 @@ class BuildPlugin implements Plugin<Project> {
}
}
/**
 * Registers the given task as one that requires a working Docker installation.
 * <p>
 * On the first invocation this installs root-project bookkeeping: an extra property recording whether Docker should be used
 * (driven by the {@code build.docker} system property, falling back to auto-detection of the binary), plus a task-graph
 * listener that fails the build up front if any registered task is actually scheduled and Docker is unusable.
 * Subsequent invocations only append the task to the shared list.
 */
static void requireDocker(final Task task) {
    final Project rootProject = task.project.rootProject
    // All accounting lives on the root project so every subproject shares one list and one graph listener.
    ExtraPropertiesExtension ext = rootProject.extensions.getByType(ExtraPropertiesExtension)

    if (rootProject.hasProperty('requiresDocker') == false) {
        /*
         * This is our first time encountering a task that requires Docker. We will add an extension that will let us track the tasks
         * that register as requiring Docker. We will add a delayed execution that when the task graph is ready if any such tasks are
         * in the task graph, then we check two things:
         * - the Docker binary is available
         * - we can execute a Docker command that requires privileges
         *
         * If either of these fail, we fail the build.
         */

        // check if the Docker binary exists and record its path
        final String dockerBinary = getDockerPath().orElse(null)
        final boolean buildDocker
        final String buildDockerProperty = System.getProperty("build.docker")
        if (buildDockerProperty == null) {
            // No explicit override: build Docker artifacts only when a binary was found.
            buildDocker = dockerBinary != null
        } else if (buildDockerProperty == "true") {
            buildDocker = true
        } else if (buildDockerProperty == "false") {
            buildDocker = false
        } else {
            // Anything other than unset/"true"/"false" is a user error worth failing loudly on.
            throw new IllegalArgumentException(
                "expected build.docker to be unset or one of \"true\" or \"false\" but was [" + buildDockerProperty + "]")
        }
        ext.set('buildDocker', buildDocker)
        ext.set('requiresDocker', [])
        rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph ->
            // Only validate Docker when at least one registered task is actually going to execute.
            final List<String> tasks = taskGraph.allTasks.intersect(ext.get('requiresDocker') as List<Task>).collect { " ${it.path}".toString()}
            if (tasks.isEmpty() == false) {
                assertDockerIsAvailable(task.project, tasks)
            }
        }
    }

    (ext.get('requiresDocker') as List<Task>).add(task)
}
/** Add a check before gradle execution phase which ensures java home for the given java version is set. */
static void requireJavaHome(Task task, int version) {
// use root project for global accounting

View File

@ -22,8 +22,11 @@ package org.elasticsearch.gradle;
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
import org.elasticsearch.gradle.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
@ -38,6 +41,7 @@ import org.gradle.api.credentials.HttpHeaderCredentials;
import org.gradle.api.file.FileTree;
import org.gradle.api.file.RelativePath;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.Sync;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.authentication.http.HttpHeaderAuthentication;
@ -72,11 +76,17 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
public void apply(Project project) {
// this is needed for isInternal
project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);
Provider<DockerSupportService> dockerSupport = Boilerplate.getBuildService(
project.getGradle().getSharedServices(),
DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME
);
distributionsContainer = project.container(ElasticsearchDistribution.class, name -> {
Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name);
Configuration extractedConfiguration = project.getConfigurations().create("es_distro_extracted_" + name);
return new ElasticsearchDistribution(name, project.getObjects(), fileConfiguration, extractedConfiguration);
return new ElasticsearchDistribution(name, project.getObjects(), dockerSupport, fileConfiguration, extractedConfiguration);
});
project.getExtensions().add(CONTAINER_NAME, distributionsContainer);

View File

@ -19,13 +19,16 @@
package org.elasticsearch.gradle;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.gradle.api.Buildable;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Collections;
import java.util.Iterator;
import java.util.Locale;
@ -110,6 +113,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
}
private final String name;
private final Provider<DockerSupportService> dockerSupport;
// pkg private so plugin can configure
final Configuration configuration;
private final Extracted extracted;
@ -119,14 +123,17 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
private final Property<Platform> platform;
private final Property<Flavor> flavor;
private final Property<Boolean> bundledJdk;
private final Property<Boolean> failIfUnavailable;
ElasticsearchDistribution(
String name,
ObjectFactory objectFactory,
Provider<DockerSupportService> dockerSupport,
Configuration fileConfiguration,
Configuration extractedConfiguration
) {
this.name = name;
this.dockerSupport = dockerSupport;
this.configuration = fileConfiguration;
this.version = objectFactory.property(String.class).convention(VersionProperties.getElasticsearch());
this.type = objectFactory.property(Type.class);
@ -134,6 +141,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
this.platform = objectFactory.property(Platform.class);
this.flavor = objectFactory.property(Flavor.class);
this.bundledJdk = objectFactory.property(Boolean.class);
this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true);
this.extracted = new Extracted(extractedConfiguration);
}
@ -182,6 +190,14 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
this.bundledJdk.set(bundledJdk);
}
public boolean getFailIfUnavailable() {
return this.failIfUnavailable.get();
}
public void setFailIfUnavailable(boolean failIfUnavailable) {
this.failIfUnavailable.set(failIfUnavailable);
}
@Override
public String toString() {
return configuration.getSingleFile().toString();
@ -203,6 +219,13 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
@Override
public TaskDependency getBuildDependencies() {
// For non-required Docker distributions, skip building the distribution if Docker is unavailable
if (getType() == Type.DOCKER
&& getFailIfUnavailable() == false
&& dockerSupport.get().getDockerAvailability().isAvailable == false) {
return task -> Collections.emptySet();
}
return configuration.getBuildDependencies();
}
@ -222,7 +245,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
if (getType() == Type.INTEG_TEST_ZIP) {
if (platform.getOrNull() != null) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
"platform cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
if (flavor.getOrNull() != null) {
@ -232,12 +255,18 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
}
if (bundledJdk.getOrNull() != null) {
throw new IllegalArgumentException(
"bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
"bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
return;
}
if (getType() != Type.DOCKER && failIfUnavailable.get() == false) {
throw new IllegalArgumentException(
"failIfUnavailable cannot be 'false' on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
}
if (getType() == Type.ARCHIVE) {
// defaults for archive, set here instead of via convention so integ-test-zip can verify they are not set
if (platform.isPresent() == false) {
@ -246,7 +275,12 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
} else { // rpm, deb or docker
if (platform.isPresent()) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]"
"platform cannot be set on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
}
if (getType() == Type.DOCKER && bundledJdk.isPresent()) {
throw new IllegalArgumentException(
"bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [docker]"
);
}
}

View File

@ -0,0 +1,61 @@
package org.elasticsearch.gradle.docker;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.provider.Provider;
import java.io.File;
import java.util.List;
import java.util.stream.Collectors;
/**
* Plugin providing {@link DockerSupportService} for detecting Docker installations and determining requirements for Docker-based
* Elasticsearch build tasks.
* <p>
* Additionally registers a task graph listener used to assert a compatible Docker installation exists when tasks requiring Docker are
* scheduled for execution. Tasks may declare a Docker requirement via an extra property. If a compatible Docker installation is not
* available on the build system an exception will be thrown prior to task execution.
*
* <pre>
* task myDockerTask {
* ext.requiresDocker = true
* }
* </pre>
*/
public class DockerSupportPlugin implements Plugin<Project> {
    public static final String DOCKER_SUPPORT_SERVICE_NAME = "dockerSupportService";
    public static final String DOCKER_ON_LINUX_EXCLUSIONS_FILE = ".ci/dockerOnLinuxExclusions";
    public static final String REQUIRES_DOCKER_ATTRIBUTE = "requiresDocker";

    /**
     * Registers the shared {@link DockerSupportService} on the root project and installs a task-graph listener that fails the
     * build before execution if any scheduled task declares (via the {@code requiresDocker} extra property) that it needs a
     * compatible Docker installation which is not present.
     *
     * @param project the project this plugin is applied to; must be the root project
     */
    @Override
    public void apply(Project project) {
        if (project != project.getRootProject()) {
            throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project.");
        }

        // The exclusions file lives in the repository root, so resolve it eagerly against the root directory.
        File exclusionsFile = new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE);
        Provider<DockerSupportService> serviceProvider = project.getGradle()
            .getSharedServices()
            .registerIfAbsent(
                DOCKER_SUPPORT_SERVICE_NAME,
                DockerSupportService.class,
                spec -> spec.parameters(params -> params.setExclusionsFile(exclusionsFile))
            );

        // Once the task graph is known, collect scheduled tasks that require Docker and assert a usable installation exists.
        project.getGradle().getTaskGraph().whenReady(taskGraph -> {
            List<String> dockerTaskPaths = taskGraph.getAllTasks()
                .stream()
                .filter(DockerSupportPlugin::declaresDockerRequirement)
                .map(Task::getPath)
                .collect(Collectors.toList());
            if (dockerTaskPaths.isEmpty() == false) {
                serviceProvider.get().failIfDockerUnavailable(dockerTaskPaths);
            }
        });
    }

    // True when the task carries an extra property named "requiresDocker" whose value is true.
    private static boolean declaresDockerRequirement(Task task) {
        ExtraPropertiesExtension extraProperties = task.getExtensions().getExtraProperties();
        return extraProperties.has(REQUIRES_DOCKER_ATTRIBUTE) && (boolean) extraProperties.get(REQUIRES_DOCKER_ATTRIBUTE);
    }
}

View File

@ -0,0 +1,375 @@
package org.elasticsearch.gradle.docker;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.info.BuildParams;
import org.gradle.api.GradleException;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.services.BuildService;
import org.gradle.api.services.BuildServiceParameters;
import org.gradle.process.ExecOperations;
import org.gradle.process.ExecResult;
import javax.inject.Inject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* Build service for detecting available Docker installation and checking for compatibility with Elasticsearch Docker image build
* requirements. This includes a minimum version requirement, as well as the ability to run privileged commands.
*/
/**
 * Build service for detecting an available Docker installation and checking it against the requirements of the Elasticsearch
 * Docker image build: a minimum Docker version, and the ability to run privileged commands.
 */
public abstract class DockerSupportService implements BuildService<DockerSupportService.Parameters> {

    private static final Logger LOGGER = Logging.getLogger(DockerSupportService.class);
    // Defines the possible locations of the Docker CLI. These will be searched in order.
    private static final String[] DOCKER_BINARIES = { "/usr/bin/docker", "/usr/local/bin/docker" };
    private static final String[] DOCKER_COMPOSE_BINARIES = { "/usr/local/bin/docker-compose", "/usr/bin/docker-compose" };
    private static final Version MINIMUM_DOCKER_VERSION = Version.fromString("17.05.0");

    private final ExecOperations execOperations;
    // Cached probe result; probing shells out to Docker, so it should only happen once per build invocation.
    private DockerAvailability dockerAvailability;

    @Inject
    public DockerSupportService(ExecOperations execOperations) {
        this.execOperations = execOperations;
    }

    /**
     * Searches for a functional Docker installation, and returns information about the search.
     *
     * @return the results of the search.
     */
    public DockerAvailability getDockerAvailability() {
        if (this.dockerAvailability == null) {
            String dockerPath = null;
            Result lastResult = null;
            Version version = null;
            boolean isVersionHighEnough = false;
            boolean isComposeAvailable = false;

            // Check if the Docker binary exists
            final Optional<String> dockerBinary = getDockerPath();
            if (isExcludedOs() == false && dockerBinary.isPresent()) {
                dockerPath = dockerBinary.get();

                // Since we use a multi-stage Docker build, check the Docker version meets minimum requirement
                lastResult = runCommand(dockerPath, "version", "--format", "{{.Server.Version}}");
                if (lastResult.isSuccess()) {
                    version = Version.fromString(lastResult.stdout.trim(), Version.Mode.RELAXED);
                    isVersionHighEnough = version.onOrAfter(MINIMUM_DOCKER_VERSION);

                    if (isVersionHighEnough) {
                        // Check that we can execute a privileged command
                        lastResult = runCommand(dockerPath, "images");

                        // If docker all checks out, see if docker-compose is available and working
                        Optional<String> composePath = getDockerComposePath();
                        if (lastResult.isSuccess() && composePath.isPresent()) {
                            isComposeAvailable = runCommand(composePath.get(), "version").isSuccess();
                        }
                    }
                }
            }

            // Available only when the binary was found, the version check passed, and the privileged command succeeded.
            boolean isAvailable = isVersionHighEnough && lastResult != null && lastResult.isSuccess();

            this.dockerAvailability = new DockerAvailability(
                isAvailable,
                isComposeAvailable,
                isVersionHighEnough,
                dockerPath,
                version,
                lastResult
            );
        }

        return this.dockerAvailability;
    }

    /**
     * Given a list of tasks that require Docker, check whether Docker is available, otherwise throw an exception.
     *
     * @param tasks the paths of the tasks that require Docker, used to build the failure message
     * @throws GradleException if Docker is not available. The exception message gives the reason.
     */
    void failIfDockerUnavailable(List<String> tasks) {
        DockerAvailability availability = getDockerAvailability();

        // Docker installation is available and compatible
        if (availability.isAvailable) {
            return;
        }

        // No Docker binary was located
        if (availability.path == null) {
            final String message = String.format(
                Locale.ROOT,
                "Docker (checked [%s]) is required to run the following task%s: \n%s",
                String.join(", ", DOCKER_BINARIES),
                tasks.size() > 1 ? "s" : "",
                String.join("\n", tasks)
            );
            throwDockerRequiredException(message);
        }

        // Docker binaries were located, but did not meet the minimum version requirement
        if (availability.lastCommand.isSuccess() && availability.isVersionHighEnough == false) {
            final String message = String.format(
                Locale.ROOT,
                "building Docker images requires minimum Docker version of %s due to use of multi-stage builds yet was [%s]",
                MINIMUM_DOCKER_VERSION,
                availability.version
            );
            throwDockerRequiredException(message);
        }

        // Some other problem, print the error
        final String message = String.format(
            Locale.ROOT,
            "a problem occurred while using Docker from [%s]%s yet it is required to run the following task%s: \n%s\n"
                + "the problem is that Docker exited with exit code [%d] with standard error output:\n%s",
            availability.path,
            availability.version == null ? "" : " v" + availability.version,
            tasks.size() > 1 ? "s" : "",
            String.join("\n", tasks),
            availability.lastCommand.exitCode,
            availability.lastCommand.stderr.trim()
        );
        throwDockerRequiredException(message);
    }

    // True when this build runs in CI on a Linux flavor known not to have Docker configured.
    private boolean isExcludedOs() {
        // We don't attempt to check the current flavor and version of Linux unless we're
        // running in CI, because we don't want to stop people running the Docker tests in
        // their own environments if they really want to.
        if (BuildParams.isCi() == false) {
            return false;
        }

        // Only some hosts in CI are configured with Docker. We attempt to work out the OS
        // and version, so that we know whether to expect to find Docker. We don't attempt
        // to probe for whether Docker is available, because that doesn't tell us whether
        // Docker is unavailable when it should be.
        final Path osRelease = Paths.get("/etc/os-release");

        if (Files.exists(osRelease)) {
            Map<String, String> values;

            try {
                final List<String> osReleaseLines = Files.readAllLines(osRelease);
                values = parseOsRelease(osReleaseLines);
            } catch (IOException e) {
                throw new GradleException("Failed to read /etc/os-release", e);
            }

            final String id = deriveId(values);
            final boolean excluded = getLinuxExclusionList().contains(id);

            if (excluded) {
                LOGGER.warn("Linux OS id [{}] is present in the Docker exclude list. Tasks requiring Docker will be disabled.", id);
            }

            return excluded;
        }

        return false;
    }

    // Reads the exclusions file (one OS id per line, '#' comments and blank lines ignored); empty when the file is absent.
    private List<String> getLinuxExclusionList() {
        File exclusionsFile = getParameters().getExclusionsFile();

        if (exclusionsFile.exists()) {
            try {
                return Files.readAllLines(exclusionsFile.toPath())
                    .stream()
                    .map(String::trim)
                    .filter(line -> (line.isEmpty() || line.startsWith("#")) == false)
                    .collect(Collectors.toList());
            } catch (IOException e) {
                throw new GradleException("Failed to read " + exclusionsFile.getAbsolutePath(), e);
            }
        } else {
            return Collections.emptyList();
        }
    }

    // visible for testing
    static String deriveId(Map<String, String> values) {
        return values.get("ID") + "-" + values.get("VERSION_ID");
    }

    // visible for testing
    static Map<String, String> parseOsRelease(final List<String> osReleaseLines) {
        final Map<String, String> values = new HashMap<>();

        osReleaseLines.stream().map(String::trim).filter(line -> (line.isEmpty() || line.startsWith("#")) == false).forEach(line -> {
            final String[] parts = line.split("=", 2);
            // Skip malformed lines with no '=' rather than failing the whole parse
            if (parts.length != 2) {
                return;
            }
            final String key = parts[0];
            // remove optional leading and trailing quotes and whitespace
            final String value = parts[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", "");

            values.put(key, value);
        });

        return values;
    }

    /**
     * Searches the entries in {@link #DOCKER_BINARIES} for the Docker CLI. This method does
     * not check whether the Docker installation appears usable, see {@link #getDockerAvailability()}
     * instead.
     *
     * @return the path to a CLI, if available.
     */
    private Optional<String> getDockerPath() {
        // Check if the Docker binary exists
        return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
    }

    /**
     * Searches the entries in {@link #DOCKER_COMPOSE_BINARIES} for the Docker Compose CLI. This method does
     * not check whether the installation appears usable, see {@link #getDockerAvailability()} instead.
     *
     * @return the path to a CLI, if available.
     */
    private Optional<String> getDockerComposePath() {
        // Check if the Docker Compose binary exists
        return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
    }

    private void throwDockerRequiredException(final String message) {
        throwDockerRequiredException(message, null);
    }

    private void throwDockerRequiredException(final String message, Exception e) {
        throw new GradleException(
            message + "\nyou can address this by attending to the reported issue, or removing the offending tasks from being executed.",
            e
        );
    }

    /**
     * Runs a command and captures the exit code, standard output and standard error.
     *
     * @param args the command and any arguments to execute
     * @return an object that captures the result of running the command. If an exception occurs
     * while running the command, or the process was killed after reaching the 10s timeout,
     * then the exit code will be -1.
     */
    private Result runCommand(String... args) {
        if (args.length == 0) {
            throw new IllegalArgumentException("Cannot execute with no command");
        }

        ByteArrayOutputStream stdout = new ByteArrayOutputStream();
        ByteArrayOutputStream stderr = new ByteArrayOutputStream();

        final ExecResult execResult = execOperations.exec(spec -> {
            // The redundant cast is to silence a compiler warning.
            spec.setCommandLine((Object[]) args);
            spec.setStandardOutput(stdout);
            spec.setErrorOutput(stderr);
            spec.setIgnoreExitValue(true);
        });

        return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString());
    }

    /**
     * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}.
     */
    public static class DockerAvailability {
        /**
         * Indicates whether Docker is available and meets the required criteria.
         * True if, and only if, Docker is:
         * <ul>
         *     <li>Installed</li>
         *     <li>Executable</li>
         *     <li>Is at least the minimum supported version</li>
         *     <li>Can execute a command that requires privileges</li>
         * </ul>
         */
        public final boolean isAvailable;

        /**
         * True if docker-compose is available.
         */
        public final boolean isComposeAvailable;

        /**
         * True if the installed Docker version is &gt;= 17.05
         */
        public final boolean isVersionHighEnough;

        /**
         * The path to the Docker CLI, or null
         */
        public final String path;

        /**
         * The installed Docker version, or null
         */
        public final Version version;

        /**
         * Information about the last command executed while probing Docker, or null.
         */
        final Result lastCommand;

        DockerAvailability(
            boolean isAvailable,
            boolean isComposeAvailable,
            boolean isVersionHighEnough,
            String path,
            Version version,
            Result lastCommand
        ) {
            this.isAvailable = isAvailable;
            this.isComposeAvailable = isComposeAvailable;
            this.isVersionHighEnough = isVersionHighEnough;
            this.path = path;
            this.version = version;
            this.lastCommand = lastCommand;
        }
    }

    /**
     * This class models the result of running a command. It captures the exit code, standard output and standard error.
     */
    private static class Result {
        final int exitCode;
        final String stdout;
        final String stderr;

        Result(int exitCode, String stdout, String stderr) {
            this.exitCode = exitCode;
            this.stdout = stdout;
            this.stderr = stderr;
        }

        boolean isSuccess() {
            return exitCode == 0;
        }

        @Override
        public String toString() {
            return "exitCode = [" + exitCode + "] " + "stdout = [" + stdout.trim() + "] " + "stderr = [" + stderr.trim() + "]";
        }
    }

    interface Parameters extends BuildServiceParameters {
        File getExclusionsFile();

        void setExclusionsFile(File exclusionsFile);
    }
}

View File

@ -19,14 +19,42 @@
package org.elasticsearch.gradle.test;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
import org.elasticsearch.gradle.Jdk;
import org.elasticsearch.gradle.JdkDownloadPlugin;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.elasticsearch.gradle.vagrant.BatsProgressLogger;
import org.elasticsearch.gradle.vagrant.VagrantBasePlugin;
import org.elasticsearch.gradle.vagrant.VagrantExtension;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.Directory;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.plugins.JavaBasePlugin;
import org.gradle.api.provider.Provider;
import org.gradle.api.specs.Specs;
import org.gradle.api.tasks.Copy;
import org.gradle.api.tasks.TaskInputs;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.testing.Test;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@ -37,42 +65,10 @@ import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
import org.elasticsearch.gradle.Jdk;
import org.elasticsearch.gradle.JdkDownloadPlugin;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.vagrant.BatsProgressLogger;
import org.elasticsearch.gradle.vagrant.VagrantBasePlugin;
import org.elasticsearch.gradle.vagrant.VagrantExtension;
import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.Directory;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.plugins.JavaBasePlugin;
import org.gradle.api.provider.Provider;
import org.gradle.api.specs.Specs;
import org.gradle.api.tasks.Copy;
import org.gradle.api.tasks.TaskInputs;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.testing.Test;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath;
public class DistroTestPlugin implements Plugin<Project> {
private static final Logger logger = Logging.getLogger(DistroTestPlugin.class);
private static final String GRADLE_JDK_VERSION = "13.0.1+9@cec27d702aa74d5a8630c65ae61e4305";
private static final String GRADLE_JDK_VENDOR = "openjdk";
@ -88,11 +84,15 @@ public class DistroTestPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
final boolean runDockerTests = shouldRunDockerTests(project);
project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);
project.getPluginManager().apply(DistributionDownloadPlugin.class);
project.getPluginManager().apply("elasticsearch.build");
Provider<DockerSupportService> dockerSupport = Boilerplate.getBuildService(
project.getGradle().getSharedServices(),
DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME
);
// TODO: it would be useful to also have the SYSTEM_JAVA_HOME setup in the root project, so that running from GCP only needs
// a java for gradle to run, and the tests are self sufficient and consistent with the java they use
@ -101,17 +101,15 @@ public class DistroTestPlugin implements Plugin<Project> {
Provider<Directory> upgradeDir = project.getLayout().getBuildDirectory().dir("packaging/upgrade");
Provider<Directory> pluginsDir = project.getLayout().getBuildDirectory().dir("packaging/plugins");
List<ElasticsearchDistribution> distributions = configureDistributions(project, upgradeVersion, runDockerTests);
List<ElasticsearchDistribution> distributions = configureDistributions(project, upgradeVersion);
TaskProvider<Copy> copyDistributionsTask = configureCopyDistributionsTask(project, distributionsDir);
TaskProvider<Copy> copyUpgradeTask = configureCopyUpgradeTask(project, upgradeVersion, upgradeDir);
TaskProvider<Copy> copyPluginsTask = configureCopyPluginsTask(project, pluginsDir);
TaskProvider<Task> destructiveDistroTest = project.getTasks().register("destructiveDistroTest");
for (ElasticsearchDistribution distribution : distributions) {
if (distribution.getType() != Type.DOCKER || runDockerTests) {
TaskProvider<?> destructiveTask = configureDistroTest(project, distribution);
destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask));
}
TaskProvider<?> destructiveTask = configureDistroTest(project, distribution, dockerSupport);
destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask));
}
Map<String, TaskProvider<?>> batsTests = new HashMap<>();
configureBatsTest(project, "plugins", distributionsDir, copyDistributionsTask, copyPluginsTask).configure(
@ -325,8 +323,14 @@ public class DistroTestPlugin implements Plugin<Project> {
});
}
private static TaskProvider<?> configureDistroTest(Project project, ElasticsearchDistribution distribution) {
private static TaskProvider<?> configureDistroTest(
Project project,
ElasticsearchDistribution distribution,
Provider<DockerSupportService> dockerSupport
) {
return project.getTasks().register(destructiveDistroTestTaskName(distribution), Test.class, t -> {
// Disable Docker distribution tests unless a Docker installation is available
t.onlyIf(t2 -> distribution.getType() != Type.DOCKER || dockerSupport.get().getDockerAvailability().isAvailable);
t.getOutputs().doNotCacheIf("Build cache is disabled for packaging tests", Specs.satisfyAll());
t.setMaxParallelForks(1);
t.setWorkingDir(project.getProjectDir());
@ -355,7 +359,7 @@ public class DistroTestPlugin implements Plugin<Project> {
});
}
private List<ElasticsearchDistribution> configureDistributions(Project project, Version upgradeVersion, boolean runDockerTests) {
private List<ElasticsearchDistribution> configureDistributions(Project project, Version upgradeVersion) {
NamedDomainObjectContainer<ElasticsearchDistribution> distributions = DistributionDownloadPlugin.getContainer(project);
List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
List<ElasticsearchDistribution> upgradeDistros = new ArrayList<>();
@ -364,7 +368,7 @@ public class DistroTestPlugin implements Plugin<Project> {
for (Flavor flavor : Flavor.values()) {
for (boolean bundledJdk : Arrays.asList(true, false)) {
// All our Docker images include a bundled JDK so it doesn't make sense to test without one
boolean skip = type == Type.DOCKER && (runDockerTests == false || bundledJdk == false);
boolean skip = type == Type.DOCKER && bundledJdk == false;
if (skip == false) {
addDistro(distributions, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch(), currentDistros);
@ -431,7 +435,6 @@ public class DistroTestPlugin implements Plugin<Project> {
String version,
List<ElasticsearchDistribution> container
) {
String name = distroId(type, platform, flavor, bundledJdk) + "-" + version;
if (distributions.findByName(name) != null) {
return;
@ -442,9 +445,18 @@ public class DistroTestPlugin implements Plugin<Project> {
if (type == Type.ARCHIVE) {
d.setPlatform(platform);
}
d.setBundledJdk(bundledJdk);
if (type != Type.DOCKER) {
d.setBundledJdk(bundledJdk);
}
d.setVersion(version);
});
// Allow us to gracefully omit building Docker distributions if Docker is not available on the system.
// In such a case as we can't build the Docker images we'll simply skip the corresponding tests.
if (type == Type.DOCKER) {
distro.setFailIfUnavailable(false);
}
container.add(distro);
}
@ -461,91 +473,4 @@ public class DistroTestPlugin implements Plugin<Project> {
Type type = distro.getType();
return "destructiveDistroTest." + distroId(type, distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk());
}
/**
 * Parses the contents of an {@code /etc/os-release} style file into a map.
 * Blank lines and {@code #} comments are ignored; values may be wrapped in
 * optional single or double quotes, which are stripped. Later duplicate keys
 * overwrite earlier ones.
 *
 * @param osReleaseLines the raw lines of the file
 * @return a map of key to unquoted value
 */
static Map<String, String> parseOsRelease(final List<String> osReleaseLines) {
    final Map<String, String> entries = new HashMap<>();

    for (final String rawLine : osReleaseLines) {
        final String line = rawLine.trim();

        // Skip blank lines and comment lines
        if (line.isEmpty() || line.startsWith("#")) {
            continue;
        }

        final String[] keyValue = line.split("=", 2);
        // remove optional leading and trailing quotes and whitespace
        final String value = keyValue[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", "");
        entries.put(keyValue[0], value);
    }

    return entries;
}
/**
 * Derives a single OS identifier of the form {@code ID-VERSION_ID}
 * (e.g. {@code ubuntu-20.04}) from parsed os-release values.
 *
 * @param osRelease the parsed key/value pairs from /etc/os-release
 * @return the combined identifier string
 */
static String deriveId(final Map<String, String> osRelease) {
    return String.join("-", osRelease.get("ID"), osRelease.get("VERSION_ID"));
}
/**
 * Reads the {@code .ci/dockerOnLinuxExclusions} file from the project root and
 * returns its non-blank, non-comment lines, trimmed.
 *
 * @param project the Gradle project, used to locate the root directory
 * @return the list of excluded Linux OS ids
 * @throws GradleException if the exclusions file cannot be read
 */
private static List<String> getLinuxExclusionList(Project project) {
    final String exclusionsFilename = "dockerOnLinuxExclusions";
    final Path exclusionsPath = project.getRootDir().toPath().resolve(".ci").resolve(exclusionsFilename);

    try {
        final List<String> exclusions = new ArrayList<>();

        for (final String rawLine : Files.readAllLines(exclusionsPath)) {
            final String line = rawLine.trim();

            // Ignore blank lines and # comments, same convention as os-release parsing
            if (line.isEmpty() == false && line.startsWith("#") == false) {
                exclusions.add(line);
            }
        }

        return exclusions;
    } catch (IOException e) {
        throw new GradleException("Failed to read .ci/" + exclusionsFilename, e);
    }
}
/**
* The {@link DistroTestPlugin} generates a number of test tasks, some
* of which are Docker packaging tests. When running on the host OS or in CI
* i.e. not in a Vagrant VM, only certain operating systems are supported. This
* method determines whether the Docker tests should be run on the host
* OS. Essentially, unless an OS and version is specifically excluded, we expect
* to be able to run Docker and test the Docker images.
*/
/**
 * Decides whether Docker packaging tests should run on the current host OS.
 * Windows is never supported; Mac is always assumed to have Docker installed;
 * on Linux the decision is made from /etc/os-release against the CI exclusion
 * list rather than by probing Docker directly.
 *
 * @param project the Gradle project, used to read the exclusion list
 * @return true if Docker tests are expected to be runnable on this host
 */
private static boolean shouldRunDockerTests(Project project) {
    final OS currentOs = OS.current();

    if (currentOs == OS.WINDOWS) {
        // Not yet supported.
        return false;
    }

    if (currentOs == OS.MAC) {
        // Assume that Docker for Mac is installed, since Docker is part of the dev workflow.
        return true;
    }

    if (currentOs == OS.LINUX) {
        // Only some hosts in CI are configured with Docker. We attempt to work out the OS
        // and version, so that we know whether to expect to find Docker. We don't attempt
        // to probe for whether Docker is available, because that doesn't tell us whether
        // Docker is unavailable when it should be.
        final Path osReleasePath = Paths.get("/etc/os-release");

        if (Files.exists(osReleasePath) == false) {
            logger.warn("/etc/os-release does not exist!");
            return false;
        }

        final Map<String, String> values;
        try {
            values = parseOsRelease(Files.readAllLines(osReleasePath));
        } catch (IOException e) {
            throw new GradleException("Failed to read /etc/os-release", e);
        }

        final String id = deriveId(values);
        final boolean shouldExclude = getLinuxExclusionList(project).contains(id);
        logger.warn("Linux OS id [" + id + "] is " + (shouldExclude ? "" : "not ") + "present in the Docker exclude list");
        return shouldExclude == false;
    }

    logger.warn("Unknown OS [" + OS.current() + "], answering false to shouldRunDockerTests()");
    return false;
}
}

View File

@ -39,6 +39,8 @@ import org.gradle.api.tasks.TaskState;
import java.io.File;
import static org.elasticsearch.gradle.tool.Boilerplate.noop;
public class TestClustersPlugin implements Plugin<Project> {
public static final String EXTENSION_NAME = "testClusters";
@ -72,7 +74,7 @@ public class TestClustersPlugin implements Plugin<Project> {
createListClustersTask(project, container);
// register cluster registry as a global build service
project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, spec -> {});
project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop());
// register throttle so we only run at most max-workers/2 nodes concurrently
project.getGradle()

View File

@ -21,20 +21,27 @@ package org.elasticsearch.gradle.testfixtures;
import com.avast.gradle.dockercompose.ComposeExtension;
import com.avast.gradle.dockercompose.DockerComposePlugin;
import com.avast.gradle.dockercompose.ServiceInfo;
import com.avast.gradle.dockercompose.tasks.ComposeDown;
import com.avast.gradle.dockercompose.tasks.ComposePull;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider;
import org.elasticsearch.gradle.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.precommit.TestingConventionsTasks;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.gradle.api.Action;
import org.gradle.api.DefaultTask;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.plugins.BasePlugin;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.testing.Test;
import java.io.File;
@ -46,75 +53,76 @@ import java.util.function.BiConsumer;
public class TestFixturesPlugin implements Plugin<Project> {
private static final Logger LOGGER = Logging.getLogger(TestFixturesPlugin.class);
private static final String DOCKER_COMPOSE_THROTTLE = "dockerComposeThrottle";
static final String DOCKER_COMPOSE_YML = "docker-compose.yml";
@Override
public void apply(Project project) {
TaskContainer tasks = project.getTasks();
project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);
TaskContainer tasks = project.getTasks();
TestFixtureExtension extension = project.getExtensions().create("testFixtures", TestFixtureExtension.class, project);
Provider<DockerComposeThrottle> dockerComposeThrottle = project.getGradle()
.getSharedServices()
.registerIfAbsent(DOCKER_COMPOSE_THROTTLE, DockerComposeThrottle.class, spec -> spec.getMaxParallelUsages().set(1));
Provider<DockerSupportService> dockerSupport = Boilerplate.getBuildService(
project.getGradle().getSharedServices(),
DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME
);
ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class);
File testfixturesDir = project.file("testfixtures_shared");
ext.set("testFixturesDir", testfixturesDir);
if (project.file(DOCKER_COMPOSE_YML).exists()) {
Task buildFixture = project.getTasks().create("buildFixture");
Task pullFixture = project.getTasks().create("pullFixture");
Task preProcessFixture = project.getTasks().create("preProcessFixture");
preProcessFixture.doFirst((task) -> {
try {
Files.createDirectories(testfixturesDir.toPath());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
preProcessFixture.getOutputs().dir(testfixturesDir);
buildFixture.dependsOn(preProcessFixture);
pullFixture.dependsOn(preProcessFixture);
Task postProcessFixture = project.getTasks().create("postProcessFixture");
postProcessFixture.dependsOn(buildFixture);
preProcessFixture.onlyIf(spec -> buildFixture.getEnabled());
postProcessFixture.onlyIf(spec -> buildFixture.getEnabled());
project.getPluginManager().apply(BasePlugin.class);
project.getPluginManager().apply(DockerComposePlugin.class);
if (dockerComposeSupported() == false) {
preProcessFixture.setEnabled(false);
postProcessFixture.setEnabled(false);
buildFixture.setEnabled(false);
pullFixture.setEnabled(false);
} else {
project.getPluginManager().apply(BasePlugin.class);
project.getPluginManager().apply(DockerComposePlugin.class);
ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class);
composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML));
composeExtension.setRemoveContainers(true);
composeExtension.setExecutable(
project.file("/usr/local/bin/docker-compose").exists() ? "/usr/local/bin/docker-compose" : "/usr/bin/docker-compose"
);
buildFixture.dependsOn(tasks.named("composeUp"));
pullFixture.dependsOn(tasks.named("composePull"));
tasks.named("composeUp").configure(t -> {
// Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions
if (BuildParams.isCi()) {
t.usesService(dockerComposeThrottle);
TaskProvider<Task> preProcessFixture = project.getTasks().register("preProcessFixture", t -> {
t.getOutputs().dir(testfixturesDir);
t.doFirst(t2 -> {
try {
Files.createDirectories(testfixturesDir.toPath());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
t.mustRunAfter(preProcessFixture);
});
tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture));
tasks.named("composeDown").configure(t -> t.doLast(t2 -> project.delete(testfixturesDir)));
});
TaskProvider<Task> buildFixture = project.getTasks()
.register("buildFixture", t -> t.dependsOn(preProcessFixture, tasks.named("composeUp")));
TaskProvider<Task> postProcessFixture = project.getTasks().register("postProcessFixture", task -> {
task.dependsOn(buildFixture);
configureServiceInfoForTask(
postProcessFixture,
task,
project,
false,
(name, port) -> postProcessFixture.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port)
(name, port) -> task.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port)
);
}
});
maybeSkipTask(dockerSupport, preProcessFixture);
maybeSkipTask(dockerSupport, postProcessFixture);
maybeSkipTask(dockerSupport, buildFixture);
ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class);
composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML));
composeExtension.setRemoveContainers(true);
composeExtension.setExecutable(
project.file("/usr/local/bin/docker-compose").exists() ? "/usr/local/bin/docker-compose" : "/usr/bin/docker-compose"
);
tasks.named("composeUp").configure(t -> {
// Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions
if (BuildParams.isCi()) {
t.usesService(dockerComposeThrottle);
}
t.mustRunAfter(preProcessFixture);
});
tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture));
tasks.named("composeDown").configure(t -> t.doLast(t2 -> project.delete(testfixturesDir)));
} else {
project.afterEvaluate(spec -> {
if (extension.fixtures.isEmpty()) {
@ -129,43 +137,44 @@ public class TestFixturesPlugin implements Plugin<Project> {
extension.fixtures.matching(fixtureProject -> fixtureProject.equals(project) == false)
.all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath()));
conditionTaskByType(tasks, extension, Test.class);
conditionTaskByType(tasks, extension, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask"));
conditionTaskByType(tasks, extension, TestingConventionsTasks.class);
conditionTaskByType(tasks, extension, ComposeUp.class);
// Skip docker compose tasks if it is unavailable
maybeSkipTasks(tasks, dockerSupport, Test.class);
maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask"));
maybeSkipTasks(tasks, dockerSupport, TestingConventionsTasks.class);
maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.elasticsearch.gradle.test.AntFixture"));
maybeSkipTasks(tasks, dockerSupport, ComposeUp.class);
maybeSkipTasks(tasks, dockerSupport, ComposePull.class);
maybeSkipTasks(tasks, dockerSupport, ComposeDown.class);
if (dockerComposeSupported() == false) {
project.getLogger()
.info(
"Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose "
+ "but none could be found so these will be skipped",
project.getPath()
);
return;
}
tasks.withType(Test.class, task -> extension.fixtures.all(fixtureProject -> {
fixtureProject.getTasks().matching(it -> it.getName().equals("buildFixture")).all(task::dependsOn);
fixtureProject.getTasks().matching(it -> it.getName().equals("composeDown")).all(task::finalizedBy);
tasks.withType(Test.class).configureEach(task -> extension.fixtures.all(fixtureProject -> {
task.dependsOn(fixtureProject.getTasks().named("postProcessFixture"));
task.finalizedBy(fixtureProject.getTasks().named("composeDown"));
configureServiceInfoForTask(
task,
fixtureProject,
true,
(name, host) -> task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host)
);
task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture"));
}));
}
private void conditionTaskByType(TaskContainer tasks, TestFixtureExtension extension, Class<? extends DefaultTask> taskClass) {
tasks.withType(taskClass)
.configureEach(
task -> task.onlyIf(
spec -> extension.fixtures.stream()
.anyMatch(fixtureProject -> fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false) == false
)
);
/**
 * Applies the docker-compose availability check to every task of the given type.
 * Uses {@code configureEach} so tasks are conditioned lazily, without being
 * realized just to attach the onlyIf spec.
 *
 * @param tasks the project's task container
 * @param dockerSupport provider of the shared Docker support build service
 * @param taskClass the task type to condition
 */
private void maybeSkipTasks(TaskContainer tasks, Provider<DockerSupportService> dockerSupport, Class<? extends DefaultTask> taskClass) {
    tasks.withType(taskClass).configureEach(t -> maybeSkipTask(dockerSupport, t));
}
/**
 * Lazily applies the docker-compose availability check to a task provider:
 * the onlyIf spec is attached only when/if the task is actually configured.
 *
 * @param dockerSupport provider of the shared Docker support build service
 * @param task the task provider to condition
 */
private void maybeSkipTask(Provider<DockerSupportService> dockerSupport, TaskProvider<Task> task) {
    task.configure(t -> maybeSkipTask(dockerSupport, t));
}
/**
 * Attaches an onlyIf spec that skips the task when docker-compose is not
 * available, logging the skip at info level. The availability is queried from
 * the shared Docker support service at execution-graph time.
 *
 * @param dockerSupport provider of the shared Docker support build service
 * @param task the task to condition
 */
private void maybeSkipTask(Provider<DockerSupportService> dockerSupport, Task task) {
    task.onlyIf(spec -> {
        final boolean composeAvailable = dockerSupport.get().getDockerAvailability().isComposeAvailable;
        if (composeAvailable) {
            return true;
        }
        LOGGER.info("Task {} requires docker-compose but it is unavailable. Task will be skipped.", task.getPath());
        return false;
    });
}
private void configureServiceInfoForTask(
@ -176,10 +185,11 @@ public class TestFixturesPlugin implements Plugin<Project> {
) {
// Configure ports for the tests as system properties.
// We only know these at execution time so we need to do it in doFirst
TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class);
task.doFirst(new Action<Task>() {
@Override
public void execute(Task theTask) {
TestFixtureExtension extension = theTask.getProject().getExtensions().getByType(TestFixtureExtension.class);
fixtureProject.getExtensions()
.getByType(ComposeExtension.class)
.getServicesInfos()
@ -204,19 +214,6 @@ public class TestFixturesPlugin implements Plugin<Project> {
});
}
/**
 * Reports whether docker-compose based test fixtures can be used on this host.
 * Never supported on Windows; otherwise requires the docker-compose binary in
 * one of the standard locations, and fixtures not being disabled via the
 * {@code tests.fixture.enabled} system property.
 *
 * @return true if docker-compose fixtures are supported
 */
public static boolean dockerComposeSupported() {
    if (OS.current().equals(OS.WINDOWS)) {
        return false;
    }
    final boolean composeInstalled = new File("/usr/local/bin/docker-compose").exists()
        || new File("/usr/bin/docker-compose").exists();
    final boolean fixturesEnabled = Boolean.parseBoolean(System.getProperty("tests.fixture.enabled", "true"));
    return composeInstalled && fixturesEnabled;
}
/**
 * Eagerly disables every task of the given type. Note this intentionally uses
 * the eager {@code withType(Class, Action)} overload so it also applies to
 * tasks registered later.
 *
 * @param tasks the project's task container
 * @param type the task type to disable
 */
private void disableTaskByType(TaskContainer tasks, Class<? extends Task> type) {
    tasks.withType(type, task -> task.setEnabled(false));
}
@SuppressWarnings("unchecked")
private Class<? extends DefaultTask> getTaskClass(String type) {
Class<?> aClass;

View File

@ -38,6 +38,10 @@ import java.util.Optional;
public abstract class Boilerplate {
/**
 * Returns an {@link Action} that does nothing. Useful as an explicit
 * placeholder where an API requires a non-null configuration action.
 *
 * @param <T> the type the action accepts
 * @return an action that ignores its argument
 */
public static <T> Action<T> noop() {
    return t -> {};
}
/**
 * Returns the Java source sets container of the given project.
 * NOTE(review): assumes the Java plugin convention is present on the project —
 * {@code getPlugin} will throw otherwise; verify callers apply the Java plugin first.
 *
 * @param project the project to query
 * @return the project's source sets
 */
public static SourceSetContainer getJavaSourceSets(Project project) {
    return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
}

View File

@ -1,243 +0,0 @@
package org.elasticsearch.gradle.tool;
import org.elasticsearch.gradle.Version;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.process.ExecResult;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
/**
 * Contains utilities for checking whether Docker is installed, is executable,
 * has a recent enough version, and appears to be functional. The Elasticsearch build
 * requires Docker &gt;= 17.05 as it uses a multi-stage build.
 */
public class DockerUtils {
    /**
     * Defines the possible locations of the Docker CLI. These will be searched in order.
     */
    private static final String[] DOCKER_BINARIES = { "/usr/bin/docker", "/usr/local/bin/docker" };

    private DockerUtils() {
        // Utility class; no instances.
    }

    /**
     * Searches the entries in {@link #DOCKER_BINARIES} for the Docker CLI. This method does
     * not check whether the Docker installation appears usable, see {@link #getDockerAvailability(Project)}
     * instead.
     *
     * @return the path to a CLI, if available.
     */
    public static Optional<String> getDockerPath() {
        // Check if the Docker binary exists
        return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
    }

    /**
     * Searches for a functional Docker installation, and returns information about the search.
     *
     * @param project a Gradle project, used to execute the probing commands
     * @return the results of the search.
     */
    private static DockerAvailability getDockerAvailability(Project project) {
        String dockerPath = null;
        Result lastResult = null;
        Version version = null;
        boolean isVersionHighEnough = false;

        // Check if the Docker binary exists
        final Optional<String> dockerBinary = getDockerPath();

        if (dockerBinary.isPresent()) {
            dockerPath = dockerBinary.get();

            // Since we use a multi-stage Docker build, check the Docker version since 17.05
            lastResult = runCommand(project, dockerPath, "version", "--format", "{{.Server.Version}}");

            if (lastResult.isSuccess()) {
                version = Version.fromString(lastResult.stdout.trim(), Version.Mode.RELAXED);

                isVersionHighEnough = version.onOrAfter("17.05.0");

                if (isVersionHighEnough) {
                    // Check that we can execute a privileged command
                    lastResult = runCommand(project, dockerPath, "images");
                }
            }
        }

        // If no binary was found, isVersionHighEnough is still false and the
        // short-circuit below avoids dereferencing the null lastResult.
        boolean isAvailable = isVersionHighEnough && lastResult.isSuccess();

        return new DockerAvailability(isAvailable, isVersionHighEnough, dockerPath, version, lastResult);
    }

    /**
     * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability(Project)}}.
     */
    private static class DockerAvailability {
        /**
         * Indicates whether Docker is available and meets the required criteria.
         * True if, and only if, Docker is:
         * <ul>
         *     <li>Installed</li>
         *     <li>Executable</li>
         *     <li>Is at least version 17.05</li>
         *     <li>Can execute a command that requires privileges</li>
         * </ul>
         */
        final boolean isAvailable;

        /**
         * True if the installed Docker version is &gt;= 17.05
         */
        final boolean isVersionHighEnough;

        /**
         * The path to the Docker CLI, or null
         */
        public final String path;

        /**
         * The installed Docker version, or null
         */
        public final Version version;

        /**
         * Information about the last command executes while probing Docker, or null.
         */
        final Result lastCommand;

        DockerAvailability(boolean isAvailable, boolean isVersionHighEnough, String path, Version version, Result lastCommand) {
            this.isAvailable = isAvailable;
            this.isVersionHighEnough = isVersionHighEnough;
            this.path = path;
            this.version = version;
            this.lastCommand = lastCommand;
        }
    }

    /**
     * Given a list of tasks that requires Docker, check whether Docker is available, otherwise
     * throw an exception.
     * @param project a Gradle project
     * @param tasks the tasks that require Docker
     * @throws GradleException if Docker is not available. The exception message gives the reason.
     */
    public static void assertDockerIsAvailable(Project project, List<String> tasks) {
        DockerAvailability availability = getDockerAvailability(project);

        if (availability.isAvailable) {
            return;
        }

        /*
         * There are tasks in the task graph that require Docker.
         * Now we are failing because either the Docker binary does
         * not exist or because execution of a privileged Docker
         * command failed.
         */
        if (availability.path == null) {
            // No Docker binary was found at any of the known locations.
            final String message = String.format(
                Locale.ROOT,
                "Docker (checked [%s]) is required to run the following task%s: \n%s",
                String.join(", ", DOCKER_BINARIES),
                tasks.size() > 1 ? "s" : "",
                String.join("\n", tasks)
            );
            throwDockerRequiredException(message);
        }

        if (availability.version == null) {
            // The binary exists but "docker version" failed, i.e. the daemon isn't running.
            final String message = String.format(
                Locale.ROOT,
                "Docker is required to run the following task%s, but it doesn't appear to be running: \n%s",
                tasks.size() > 1 ? "s" : "",
                String.join("\n", tasks)
            );
            throwDockerRequiredException(message);
        }

        if (availability.isVersionHighEnough == false) {
            final String message = String.format(
                Locale.ROOT,
                "building Docker images requires Docker version 17.05+ due to use of multi-stage builds yet was [%s]",
                availability.version
            );
            throwDockerRequiredException(message);
        }

        // Some other problem, print the error
        final String message = String.format(
            Locale.ROOT,
            "a problem occurred running Docker from [%s] yet it is required to run the following task%s: \n%s\n"
                + "the problem is that Docker exited with exit code [%d] with standard error output [%s]",
            availability.path,
            tasks.size() > 1 ? "s" : "",
            String.join("\n", tasks),
            availability.lastCommand.exitCode,
            availability.lastCommand.stderr.trim()
        );
        throwDockerRequiredException(message);
    }

    private static void throwDockerRequiredException(final String message) {
        throwDockerRequiredException(message, null);
    }

    private static void throwDockerRequiredException(final String message, Exception e) {
        throw new GradleException(
            message
                + "\nyou can address this by attending to the reported issue, "
                + "removing the offending tasks from being executed, "
                + "or by passing -Dbuild.docker=false",
            e
        );
    }

    /**
     * Runs a command and captures the exit code, standard output and standard error.
     *
     * @param project the Gradle project, used to execute the process
     * @param args the command and any arguments to execute
     * @return an object that captures the exit code, standard output and standard error
     *         of running the command. A non-zero exit code is captured in the result
     *         rather than causing an exception to be thrown.
     */
    private static Result runCommand(Project project, String... args) {
        if (args.length == 0) {
            throw new IllegalArgumentException("Cannot execute with no command");
        }

        ByteArrayOutputStream stdout = new ByteArrayOutputStream();
        ByteArrayOutputStream stderr = new ByteArrayOutputStream();

        final ExecResult execResult = project.exec(spec -> {
            // The redundant cast is to silence a compiler warning.
            spec.setCommandLine((Object[]) args);
            spec.setStandardOutput(stdout);
            spec.setErrorOutput(stderr);
            // Without this, Gradle throws ExecException on a non-zero exit code instead of
            // returning, which would prevent callers from ever inspecting a failing
            // exit code / stderr via the Result below.
            spec.setIgnoreExitValue(true);
        });

        return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString());
    }

    /**
     * This class models the result of running a command. It captures the exit code, standard output and standard error.
     */
    private static class Result {
        final int exitCode;
        final String stdout;
        final String stderr;

        Result(int exitCode, String stdout, String stderr) {
            this.exitCode = exitCode;
            this.stdout = stdout;
            this.stderr = stderr;
        }

        boolean isSuccess() {
            return exitCode == 0;
        }

        public String toString() {
            return "exitCode = [" + exitCode + "] " + "stdout = [" + stdout.trim() + "] " + "stderr = [" + stderr.trim() + "]";
        }
    }
}

View File

@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.docker.DockerSupportPlugin

View File

@ -123,7 +123,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
Platform.LINUX,
null,
null,
"platform not allowed for elasticsearch distribution [testdistro]"
"platform cannot be set on elasticsearch distribution [testdistro]"
);
}
@ -175,7 +175,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
null,
null,
true,
"bundledJdk not allowed for elasticsearch distribution [testdistro]"
"bundledJdk cannot be set on elasticsearch distribution [testdistro]"
);
}

View File

@ -1,14 +1,16 @@
package org.elasticsearch.gradle.test;
package org.elasticsearch.gradle.docker;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.gradle.test.DistroTestPlugin.deriveId;
import static org.elasticsearch.gradle.test.DistroTestPlugin.parseOsRelease;
import static org.elasticsearch.gradle.docker.DockerSupportService.deriveId;
import static org.elasticsearch.gradle.docker.DockerSupportService.parseOsRelease;
import static org.hamcrest.CoreMatchers.equalTo;
public class DistroTestPluginTests extends GradleIntegrationTestCase {
public class DockerSupportServiceTests extends GradleIntegrationTestCase {
public void testParseOsReleaseOnOracle() {
final List<String> lines = List.of(

View File

@ -1,4 +1,4 @@
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.BuildParams
@ -6,6 +6,7 @@ import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.distribution-download'
testFixtures.useFixture()
@ -105,10 +106,19 @@ task copyKeystore(type: Sync) {
}
}
preProcessFixture {
if (TestFixturesPlugin.dockerComposeSupported()) {
dependsOn assemble
elasticsearch_distributions {
Flavor.values().each { distroFlavor ->
"docker_$distroFlavor" {
flavor = distroFlavor
type = 'docker'
version = VersionProperties.getElasticsearch()
failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
}
}
}
preProcessFixture {
dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss
dependsOn copyKeystore
doLast {
// tests expect to have an empty repo
@ -140,16 +150,13 @@ task integTest(type: Test) {
outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
maxParallelForks = '1'
include '**/*IT.class'
// don't add the tasks to build the docker images if we have no way of testing them
if (TestFixturesPlugin.dockerComposeSupported()) {
dependsOn assemble
}
}
check.dependsOn integTest
void addBuildDockerImage(final boolean oss) {
final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: LoggedExec) {
ext.requiresDocker = true // mark this task as requiring docker to execute
inputs.files(tasks.named(taskName("copy", oss, "DockerContext")))
List<String> tags
if (oss) {
@ -179,7 +186,6 @@ void addBuildDockerImage(final boolean oss) {
}
}
assemble.dependsOn(buildDockerImageTask)
BuildPlugin.requireDocker(buildDockerImageTask)
}
for (final boolean oss : [false, true]) {

View File

@ -97,7 +97,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
maxWaitInSeconds 60
onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false }
onlyIf { BuildParams.inFipsJvm == false }
waitCondition = { fixture, ant ->
// the hdfs.MiniHDFS fixture writes the ports file when
// it's ready, so we can just wait for the file to exist

View File

@ -16,10 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.distribution-download'
testFixtures.useFixture()
@ -42,15 +44,17 @@ task copyKeystore(type: Sync) {
}
}
preProcessFixture {
if (TestFixturesPlugin.dockerComposeSupported()) {
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
dependsOn ":distribution:docker:buildDockerImage"
} else {
dependsOn ":distribution:docker:buildOssDockerImage"
}
elasticsearch_distributions {
docker {
type = 'docker'
flavor = System.getProperty('tests.distribution', 'default')
version = VersionProperties.getElasticsearch()
failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable
}
dependsOn copyKeystore
}
preProcessFixture {
dependsOn copyKeystore, elasticsearch_distributions.docker
doLast {
// tests expect to have an empty repo
project.delete(
@ -68,14 +72,12 @@ preProcessFixture {
}
}
if (TestFixturesPlugin.dockerComposeSupported()) {
dockerCompose {
tcpPortsToIgnoreWhenWaiting = [9600, 9601]
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
useComposeFiles = ['docker-compose.yml']
} else {
useComposeFiles = ['docker-compose-oss.yml']
}
dockerCompose {
tcpPortsToIgnoreWhenWaiting = [9600, 9601]
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
useComposeFiles = ['docker-compose.yml']
} else {
useComposeFiles = ['docker-compose-oss.yml']
}
}
@ -100,10 +102,6 @@ task integTest(type: Test) {
outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
maxParallelForks = '1'
include '**/*IT.class'
// don't add the tasks to build the docker images if we have no way of testing them
if (TestFixturesPlugin.dockerComposeSupported()) {
dependsOn ":distribution:docker:buildDockerImage"
}
}
check.dependsOn integTest