Testclusters: start using it for testing some plugins (#39693)

* enable testclusters for some plugins
Alpar Torok 2019-03-07 17:48:49 +02:00
parent 7dcc191aa8
commit 6c75a2f2b0
10 changed files with 228 additions and 82 deletions
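For orientation, here is a rough sketch (not part of the commit) of what a plugin project opting into testclusters looks like after this change. The plugin ids mirror plugins/build.gradle further down, and the integTestCluster name and defaults are the ones RestIntegTestTask registers in the diff below:

// build.gradle of a hypothetical plugin project
apply plugin: 'elasticsearch.testclusters'   // applied before esplugin, as in plugins/build.gradle below
apply plugin: 'elasticsearch.esplugin'

// RestIntegTestTask creates a cluster named integTestCluster; it can be adjusted
// through the testClusters container if the defaults need changing
testClusters {
    integTestCluster {
        distribution = 'INTEG_TEST'
        version = project.version
    }
}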

View File

@@ -46,6 +46,8 @@ allprojects {
description = "Elasticsearch subproject ${project.path}"
}
BuildPlugin.configureRepositories(project)
apply plugin: 'nebula.info-scm'
String licenseCommit
if (VersionProperties.elasticsearch.toString().endsWith('-SNAPSHOT')) {
@@ -227,7 +229,6 @@ allprojects {
"org.elasticsearch.client:transport:${version}": ':client:transport',
"org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${version}": ':modules:lang-painless:spi',
"org.elasticsearch.test:framework:${version}": ':test:framework',
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:archives:integ-test-zip',
"org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage',
"org.elasticsearch.xpack.test:feature-aware:${version}": ':x-pack:test:feature-aware',
// for transport client

View File

@@ -24,6 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.Project
import org.gradle.api.publish.maven.MavenPublication
import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
@@ -51,21 +52,36 @@ public class PluginBuildPlugin extends BuildPlugin {
String name = project.pluginProperties.extension.name
project.archivesBaseName = name
// set teh project description so it will be picked up by publishing
// set the project description so it will be picked up by publishing
project.description = project.pluginProperties.extension.description
configurePublishing(project)
project.integTestCluster.dependsOn(project.bundlePlugin)
project.tasks.run.dependsOn(project.bundlePlugin)
if (project.plugins.hasPlugin(TestClustersPlugin.class) == false) {
project.integTestCluster.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
project.integTestCluster.module(project)
} else {
project.integTestCluster.plugin(project.path)
}
} else {
project.tasks.integTest.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
throw new RuntimeException("Testclusters does not support modules yet");
} else {
project.testClusters.integTestCluster.plugin(
project.file(project.tasks.bundlePlugin.archiveFile)
)
}
}
project.tasks.run.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
project.integTestCluster.module(project)
project.tasks.run.clusterConfig.module(project)
project.tasks.run.clusterConfig.distribution = System.getProperty(
'run.distribution', 'integ-test-zip'
)
} else {
project.integTestCluster.plugin(project.path)
project.tasks.run.clusterConfig.plugin(project.path)
}
@@ -136,7 +152,10 @@ public class PluginBuildPlugin extends BuildPlugin {
private static void createIntegTestTask(Project project) {
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.mustRunAfter(project.precommit, project.test)
project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip')
if (project.plugins.hasPlugin(TestClustersPlugin.class) == false) {
// only if not using test clusters
project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip')
}
project.check.dependsOn(integTest)
}
@@ -214,7 +233,7 @@ public class PluginBuildPlugin extends BuildPlugin {
protected void addNoticeGeneration(Project project) {
File licenseFile = project.pluginProperties.extension.licenseFile
if (licenseFile != null) {
project.bundlePlugin.from(licenseFile.parentFile) {
project.tasks.bundlePlugin.from(licenseFile.parentFile) {
include(licenseFile.name)
rename { 'LICENSE.txt' }
}
@@ -223,7 +242,7 @@ public class PluginBuildPlugin extends BuildPlugin {
if (noticeFile != null) {
NoticeTask generateNotice = project.tasks.create('generateNotice', NoticeTask.class)
generateNotice.inputFile = noticeFile
project.bundlePlugin.from(generateNotice)
project.tasks.bundlePlugin.from(generateNotice)
}
}
}
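The TestClustersPlugin check above is the crux of this file's change: it decides whether the bundled plugin zip is handed to the legacy integTestCluster extension or to the cluster managed by testclusters. A condensed sketch of the same idiom, assuming a hypothetical standalone build script (task and extension names as in the diff):

import org.elasticsearch.gradle.testclusters.TestClustersPlugin

if (project.plugins.hasPlugin(TestClustersPlugin)) {
    // new path: wire the bundled zip into the testclusters-managed cluster
    tasks.integTest.dependsOn tasks.bundlePlugin
    testClusters.integTestCluster.plugin(file(tasks.bundlePlugin.archiveFile))
} else {
    // legacy path: ClusterFormationTasks exposes an integTestCluster extension
    integTestCluster.dependsOn tasks.bundlePlugin
    integTestCluster.plugin(project.path)
}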

View File

@@ -174,10 +174,20 @@ class ClusterFormationTasks {
/** Adds a dependency on the given distribution */
static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
boolean internalBuild = project.hasProperty('bwcVersions')
if (distro.equals("integ-test-zip")) {
// short circuit integ test so it doesn't complicate the rest of the distribution setup below
project.dependencies.add(configuration.name,
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${elasticsearchVersion}@zip")
if (internalBuild) {
project.dependencies.add(
configuration.name,
project.dependencies.project(path: ":distribution", configuration: 'integ-test-zip')
)
} else {
project.dependencies.add(
configuration.name,
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${elasticsearchVersion}@zip"
)
}
return
}
// TEMP HACK
@@ -209,8 +219,9 @@ class ClusterFormationTasks {
if (distro.equals("oss")) {
snapshotProject = "oss-" + snapshotProject
}
boolean internalBuild = project.hasProperty('bwcVersions')
BwcVersions.UnreleasedVersionInfo unreleasedInfo = null
if (project.hasProperty('bwcVersions')) {
// NOTE: leniency is needed for external plugin authors using build-tools. maybe build the version compat info into build-tools?
unreleasedInfo = project.bwcVersions.unreleasedInfo(version)

View File

@@ -20,6 +20,8 @@ package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.testclusters.ElasticsearchNode
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.DefaultTask
import org.gradle.api.Task
import org.gradle.api.execution.TaskExecutionAdapter
@@ -55,7 +57,19 @@ public class RestIntegTestTask extends DefaultTask {
super.dependsOn(runner)
clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
runner.dependsOn(clusterInit)
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
boolean usesTestclusters = project.plugins.hasPlugin(TestClustersPlugin.class)
if (usesTestclusters == false) {
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
} else {
project.testClusters {
integTestCluster {
distribution = 'INTEG_TEST'
version = project.version
javaHome = project.file(project.ext.runtimeJavaHome)
}
}
runner.useCluster project.testClusters.integTestCluster
}
// override/add more for rest tests
runner.parallelism = '1'
@@ -66,31 +80,38 @@ public class RestIntegTestTask extends DefaultTask {
if (System.getProperty("tests.cluster") != null) {
throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null")
}
// we pass all nodes to the rest cluster to allow the clients to round-robin between them
// this is more realistic than just talking to a single node
runner.systemProperty('tests.rest.cluster', "${-> nodes.collect{it.httpUri()}.join(",")}")
runner.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}")
// TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
// that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
// both as separate sysprops
runner.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
if (usesTestclusters == true) {
ElasticsearchNode node = project.testClusters.integTestCluster
runner.systemProperty('tests.rest.cluster', {node.allHttpSocketURI.join(",") })
runner.systemProperty('tests.config.dir', {node.getConfigDir()})
runner.systemProperty('tests.cluster', {node.transportPortURI})
} else {
// we pass all nodes to the rest cluster to allow the clients to round-robin between them
// this is more realistic than just talking to a single node
runner.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}")
runner.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}")
// TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
// that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
// both as separate sysprops
runner.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
// dump errors and warnings from cluster log on failure
TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
@Override
void afterExecute(Task task, TaskState state) {
if (state.failure != null) {
for (NodeInfo nodeInfo : nodes) {
printLogExcerpt(nodeInfo)
// dump errors and warnings from cluster log on failure
TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
@Override
void afterExecute(Task task, TaskState state) {
if (state.failure != null) {
for (NodeInfo nodeInfo : nodes) {
printLogExcerpt(nodeInfo)
}
}
}
}
}
runner.doFirst {
project.gradle.addListener(logDumpListener)
}
runner.doLast {
project.gradle.removeListener(logDumpListener)
runner.doFirst {
project.gradle.addListener(logDumpListener)
}
runner.doLast {
project.gradle.removeListener(logDumpListener)
}
}
} else {
if (System.getProperty("tests.cluster") == null) {
@@ -113,11 +134,13 @@ public class RestIntegTestTask extends DefaultTask {
clusterInit.enabled = false
return // no need to add cluster formation tasks if the task won't run!
}
// only create the cluster if needed as otherwise an external cluster to use was specified
if (System.getProperty("tests.rest.cluster") == null) {
nodes = ClusterFormationTasks.setup(project, "${name}Cluster", runner, clusterConfig)
if (usesTestclusters == false) {
// only create the cluster if needed as otherwise an external cluster to use was specified
if (System.getProperty("tests.rest.cluster") == null) {
nodes = ClusterFormationTasks.setup(project, "${name}Cluster", runner, clusterConfig)
}
super.dependsOn(runner.finalizedBy)
}
super.dependsOn(runner.finalizedBy)
}
}
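The testclusters branch above passes closures rather than eagerly built strings because the cluster's HTTP and transport URIs only exist once the cluster has started; they must be resolved when the runner executes, not at configuration time. A minimal sketch of that pattern, assuming a hypothetical runner task and the node accessors added to ElasticsearchNode further down:

def node = project.testClusters.integTestCluster
runner.systemProperty('tests.rest.cluster', { node.allHttpSocketURI.join(',') })  // evaluated at execution time
runner.systemProperty('tests.cluster', { node.transportPortURI })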

View File

@@ -20,18 +20,24 @@ package org.elasticsearch.gradle;
public enum Distribution {
INTEG_TEST("integ-test"),
DEFAULT("elasticsearch"),
OSS("elasticsearch-oss");
INTEG_TEST("elasticsearch", "integ-test-zip"),
DEFAULT("elasticsearch", "elasticsearch"),
OSS("elasticsearch-oss", "elasticsearch-oss");
private final String fileName;
private final String artifactName;
private final String group;
Distribution(String name) {
this.fileName = name;
Distribution(String name, String group) {
this.artifactName = name;
this.group = group;
}
public String getArtifactName() {
return fileName;
return artifactName;
}
public String getGroup() {
return "org.elasticsearch.distribution." + group;
}
public String getFileExtension() {
@@ -46,10 +52,27 @@
}
public String getClassifier() {
return OS.<String>conditional()
.onLinux(() -> "linux-x86_64")
.onWindows(() -> "windows-x86_64")
.onMac(() -> "darwin-x86_64")
.supply();
if (this.equals(INTEG_TEST)) {
return "";
} else {
return OS.<String>conditional()
.onLinux(() -> "linux-x86_64")
.onWindows(() -> "windows-x86_64")
.onMac(() -> "darwin-x86_64")
.supply();
}
}
public String getLiveConfiguration() {
if (this.equals(INTEG_TEST)) {
return "integ-test-zip";
} else {
return (this.equals(OSS) ? "oss-" : "") + OS.<String>conditional()
.onLinux(() -> "linux-tar")
.onWindows(() -> "windows-zip")
.onMac(() -> "darwin-tar")
.supply();
}
}
}
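The new group, classifier, and live-configuration accessors exist so TestClustersPlugin (further down) can turn a Distribution into either external Maven coordinates or a project configuration. A small illustration, not from the commit, of the coordinates the reworked enum yields; the file extension is an assumption (tar.gz for OSS on Linux):

import org.elasticsearch.gradle.Distribution

def distro = Distribution.OSS
def version = '6.7.0'  // hypothetical released version
// the same "%s:%s:%s:%s@%s" shape TestClustersPlugin builds below
def notation = "${distro.group}:${distro.artifactName}:${version}:${distro.classifier}@${distro.fileExtension}"
// e.g. on Linux: org.elasticsearch.distribution.elasticsearch-oss:elasticsearch-oss:6.7.0:linux-x86_64@tar.gz
// INTEG_TEST instead maps to the integ-test-zip group with an empty classifier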

View File

@@ -143,6 +143,10 @@ public class ElasticsearchNode {
plugin(plugin.toURI());
}
public Path getConfigDir() {
return configFile.getParent();
}
public void freeze() {
requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
@@ -196,6 +200,7 @@ public class ElasticsearchNode {
logger.info("Starting `{}`", this);
Path distroArtifact = artifactsExtractDir
.resolve(distribution.getGroup())
.resolve(distribution.getArtifactName() + "-" + getVersion());
if (Files.exists(distroArtifact) == false) {
@@ -205,8 +210,8 @@ public class ElasticsearchNode {
throw new TestClustersException("Can not start " + this + ", is not a directory: " + distroArtifact);
}
services.sync(spec -> {
spec.from(distroArtifact.resolve("config").toFile());
spec.into(configFile.getParent());
spec.from(distroArtifact);
spec.into(workingDir);
});
try {
@@ -297,6 +302,16 @@ public class ElasticsearchNode {
return getTransportPortInternal().get(0);
}
public List<String> getAllHttpSocketURI() {
waitForAllConditions();
return getHttpPortInternal();
}
public List<String> getAllTransportPortURI() {
waitForAllConditions();
return getTransportPortInternal();
}
synchronized void stop(boolean tailLogs) {
if (esProcess == null && tailLogs) {
// This is a special case. If start() throws an exception the plugin will still call stop

View File

@@ -20,6 +20,10 @@ package org.elasticsearch.gradle.testclusters;
import groovy.lang.Closure;
import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.Version;
import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@@ -27,16 +31,17 @@ import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.execution.TaskActionListener;
import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.tasks.Sync;
import org.gradle.api.tasks.TaskState;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -96,27 +101,33 @@ public class TestClustersPlugin implements Plugin<Project> {
claimsInventory.clear();
runningClusters.clear();
// We have a single task to sync the helper configuration to "artifacts dir"
// the clusters will look for artifacts there based on the naming conventions.
// Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in
// the build.
rootProject.getTasks().create(SYNC_ARTIFACTS_TASK_NAME, Sync.class, sync -> {
sync.from((Callable<List<FileTree>>) () ->
helperConfiguration.getFiles()
.stream()
.map(file -> {
if (file.getName().endsWith(".zip")) {
return project.zipTree(file);
} else if (file.getName().endsWith("tar.gz")) {
return project.tarTree(file);
} else {
throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
}
})
.collect(Collectors.toList())
);
sync.into(getTestClustersConfigurationExtractDir(project));
rootProject.getTasks().create(SYNC_ARTIFACTS_TASK_NAME, sync -> {
sync.getInputs().files((Callable<FileCollection>) helperConfiguration::getAsFileTree);
sync.getOutputs().dir(getTestClustersConfigurationExtractDir(project));
sync.doLast(new Action<Task>() {
@Override
public void execute(Task task) {
project.sync(spec ->
helperConfiguration.getResolvedConfiguration().getResolvedArtifacts().forEach(resolvedArtifact -> {
final FileTree files;
File file = resolvedArtifact.getFile();
if (file.getName().endsWith(".zip")) {
files = project.zipTree(file);
} else if (file.getName().endsWith("tar.gz")) {
files = project.tarTree(file);
} else {
throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
}
spec.from(files).into(getTestClustersConfigurationExtractDir(project) + "/" +
resolvedArtifact.getModuleVersion().getId().getGroup()
);
}));
}
});
});
// When we know what tasks will run, we claim the clusters of those task to differentiate between clusters
@@ -293,16 +304,54 @@ public class TestClustersPlugin implements Plugin<Project> {
// We need afterEvaluate here despite the fact that container is a domain object, we can't implement this with
// all because fields can change after the fact.
project.afterEvaluate(ip -> container.forEach(esNode -> {
// declare dependencies against artifacts needed by cluster formation.
String dependency = String.format(
"unused:%s:%s:%s@%s",
esNode.getDistribution().getArtifactName(),
esNode.getVersion(),
esNode.getDistribution().getClassifier(),
esNode.getDistribution().getFileExtension()
);
logger.info("Cluster {} depends on {}", esNode.getName(), dependency);
rootProject.getDependencies().add(HELPER_CONFIGURATION_NAME, dependency);
BwcVersions.UnreleasedVersionInfo unreleasedInfo;
final List<Version> unreleased;
{
ExtraPropertiesExtension extraProperties = project.getExtensions().getExtraProperties();
if (extraProperties.has("bwcVersions")) {
Object bwcVersionsObj = extraProperties.get("bwcVersions");
if (bwcVersionsObj instanceof BwcVersions == false) {
throw new IllegalStateException("Expected project.bwcVersions to be of type VersionCollection " +
"but instead it was " + bwcVersionsObj.getClass());
}
final BwcVersions bwcVersions = (BwcVersions) bwcVersionsObj;
unreleased = ((BwcVersions) bwcVersionsObj).getUnreleased();
unreleasedInfo = bwcVersions.unreleasedInfo(Version.fromString(esNode.getVersion()));
} else {
logger.info("No version information available, assuming all versions used are released");
unreleased = Collections.emptyList();
unreleasedInfo = null;
}
}
if (unreleased.contains(Version.fromString(esNode.getVersion()))) {
Map<String, Object> projectNotation = new HashMap<>();
projectNotation.put("path", unreleasedInfo.gradleProjectPath);
projectNotation.put("configuration", esNode.getDistribution().getLiveConfiguration());
rootProject.getDependencies().add(
HELPER_CONFIGURATION_NAME,
project.getDependencies().project(projectNotation)
);
} else {
if (esNode.getDistribution().equals(Distribution.INTEG_TEST)) {
rootProject.getDependencies().add(
HELPER_CONFIGURATION_NAME, "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + esNode.getVersion()
);
} else {
// declare dependencies to be downloaded from the download service.
// The BuildPlugin sets up the right repo for this to work
// TODO: move the repo definition in this plugin when ClusterFormationTasks is removed
String dependency = String.format(
"%s:%s:%s:%s@%s",
esNode.getDistribution().getGroup(),
esNode.getDistribution().getArtifactName(),
esNode.getVersion(),
esNode.getDistribution().getClassifier(),
esNode.getDistribution().getFileExtension()
);
logger.info("Cluster {} depends on {}", esNode.getName(), dependency);
rootProject.getDependencies().add(HELPER_CONFIGURATION_NAME, dependency);
}
}
}));
}
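Taken together, the afterEvaluate block resolves each cluster's distribution in one of two ways: versions still present in bwcVersions become project dependencies on the in-repo distribution, everything else becomes external coordinates. A simplified sketch, outside the plugin, of the two notations it adds to the helper configuration (the configuration name, project path, and version below are hypothetical):

// unreleased version (found in bwcVersions): depend on the distribution project
rootProject.dependencies.add('testclustersHelper', dependencies.project(
    path: ':distribution:archives:oss-linux-tar',  // stands in for unreleasedInfo.gradleProjectPath
    configuration: 'oss-linux-tar'                 // Distribution.getLiveConfiguration() for OSS on Linux
))

// released version: depend on external coordinates served by the download service
rootProject.dependencies.add('testclustersHelper',
    'org.elasticsearch.distribution.integ-test-zip:elasticsearch:6.7.0')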

View File

@@ -92,7 +92,7 @@ tasks.withType(AbstractArchiveTask) {
dependsOn createLogsDir, createPluginsDir
String subdir = it.name.substring('build'.size()).replaceAll(/[A-Z]/) { '-' + it.toLowerCase() }.substring(1)
destinationDir = file("${subdir}/build/distributions")
baseName = "elasticsearch${ subdir.contains('oss') ? '-oss' : ''}"
baseName = "elasticsearch${subdir.contains('oss') ? '-oss' : ''}"
}
Closure commonZipConfig = {

View File

@@ -520,6 +520,7 @@ subprojects {
['archives:windows-zip','archives:oss-windows-zip',
'archives:darwin-tar','archives:oss-darwin-tar',
'archives:linux-tar', 'archives:oss-linux-tar',
'archives:integ-test-zip',
'packages:rpm', 'packages:deb',
'packages:oss-rpm', 'packages:oss-deb',
].forEach { subName ->

View File

@@ -20,6 +20,10 @@
// only configure immediate children of plugins dir
configure(subprojects.findAll { it.parent.path == project.path }) {
group = 'org.elasticsearch.plugin'
// TODO exclude some plugins as they require features not yet supported by testclusters
if (false == name in ['repository-azure', 'repository-hdfs', 'repository-s3']) {
apply plugin: 'elasticsearch.testclusters'
}
apply plugin: 'elasticsearch.esplugin'