Testclusters use separate configurations per version (#41504)

* artifact config tests WIP

* wip version configuration

* Tests for older versions

* use separate configurations per version

* checkstyle

* PR review
Alpar Torok 2019-05-06 14:49:01 +03:00
parent d54a921032
commit 2455295c6d
7 changed files with 157 additions and 75 deletions

Distribution.java (org.elasticsearch.gradle)

@@ -20,16 +20,14 @@ package org.elasticsearch.gradle;
 public enum Distribution {

-    INTEG_TEST("elasticsearch", "integ-test-zip"),
-    DEFAULT("elasticsearch", "elasticsearch"),
-    OSS("elasticsearch-oss", "elasticsearch-oss");
+    INTEG_TEST("elasticsearch"),
+    DEFAULT("elasticsearch"),
+    OSS("elasticsearch-oss");

     private final String artifactName;
-    private final String group;

-    Distribution(String name, String group) {
+    Distribution(String name) {
         this.artifactName = name;
-        this.group = group;
     }

     public String getArtifactName() {
@@ -37,7 +35,11 @@ public enum Distribution {
     }

     public String getGroup() {
-        return "org.elasticsearch.distribution." + group;
+        if (this.equals(INTEG_TEST)) {
+            return "org.elasticsearch.distribution.integ-test-zip";
+        } else {
+            return "org.elasticsearch.distribution." + name().toLowerCase();
+        }
     }

     public String getFileExtension() {
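A minimal sketch (not part of this commit) of what the reworked getGroup() resolves to for each constant. The class and method names below are made up for illustration; the group strings follow directly from the hunk above.

// Illustration only: mirrors the new Distribution.getGroup() logic.
public class DistributionGroupSketch {
    enum Distribution { INTEG_TEST, DEFAULT, OSS }

    static String group(Distribution d) {
        // INTEG_TEST keeps its hard-coded group; the others derive it from the enum name.
        return d == Distribution.INTEG_TEST
            ? "org.elasticsearch.distribution.integ-test-zip"
            : "org.elasticsearch.distribution." + d.name().toLowerCase();
    }

    public static void main(String[] args) {
        for (Distribution d : Distribution.values()) {
            System.out.println(d + " -> " + group(d));
        }
        // Prints:
        //   INTEG_TEST -> org.elasticsearch.distribution.integ-test-zip
        //   DEFAULT -> org.elasticsearch.distribution.default
        //   OSS -> org.elasticsearch.distribution.oss
    }
}

These groups line up with the repository filters added in TestClustersPlugin below, whose comments state that the integ test distribution is resolved from Maven Central while the other distributions come from the artifacts.elastic.co download service.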

ElasticsearchCluster.java (org.elasticsearch.gradle.testclusters)

@@ -279,7 +279,9 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
     }

     void eachVersionedDistribution(BiConsumer<String, Distribution> consumer) {
-        nodes.forEach(each -> consumer.accept(each.getVersion(), each.getDistribution()));
+        nodes.forEach(each -> {
+            consumer.accept(each.getVersion(), each.getDistribution());
+        });
     }

     public ElasticsearchNode singleNode() {

ElasticsearchNode.java (org.elasticsearch.gradle.testclusters)

@@ -276,7 +276,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {

         Path distroArtifact = artifactsExtractDir
             .resolve(distribution.getGroup())
-            .resolve(distribution.getArtifactName() + "-" + getVersion());
+            .resolve("elasticsearch-" + getVersion());

         if (Files.exists(distroArtifact) == false) {
             throw new TestClustersException("Can not start " + this + ", missing: " + distroArtifact);
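A small sketch (not part of this commit) of the path the node now resolves under the shared extract directory. The root path, group, and version below are illustrative; the shape (group directory from Distribution.getGroup(), then a fixed elasticsearch-<version> folder) follows the hunk above.

import java.nio.file.Path;
import java.nio.file.Paths;

// Illustration only: how the unpacked distribution is located after the sync task has run.
public class DistroArtifactPathSketch {
    public static void main(String[] args) {
        Path artifactsExtractDir = Paths.get("build", "testclusters", "extract"); // illustrative root
        String group = "org.elasticsearch.distribution.oss";                      // Distribution.getGroup()
        String version = "7.0.0";                                                 // example version

        Path distroArtifact = artifactsExtractDir
            .resolve(group)
            .resolve("elasticsearch-" + version);

        // build/testclusters/extract/org.elasticsearch.distribution.oss/elasticsearch-7.0.0
        System.out.println(distroArtifact);
    }
}

The switch from distribution.getArtifactName() to the literal "elasticsearch-" prefix presumably reflects that each archive unpacks to a top-level elasticsearch-<version> directory regardless of distribution.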

TestClustersPlugin.java (org.elasticsearch.gradle.testclusters)

@@ -20,17 +20,18 @@ package org.elasticsearch.gradle.testclusters;

 import groovy.lang.Closure;
 import org.elasticsearch.gradle.BwcVersions;
-import org.elasticsearch.gradle.Distribution;
 import org.elasticsearch.gradle.Version;
+import org.elasticsearch.gradle.tool.Boilerplate;
 import org.gradle.api.Action;
 import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 import org.gradle.api.Task;
 import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
+import org.gradle.api.credentials.HttpHeaderCredentials;
 import org.gradle.api.execution.TaskActionListener;
 import org.gradle.api.execution.TaskExecutionListener;
-import org.gradle.api.file.FileCollection;
 import org.gradle.api.file.FileTree;
 import org.gradle.api.logging.Logger;
 import org.gradle.api.logging.Logging;
@@ -46,7 +47,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -56,7 +56,7 @@ public class TestClustersPlugin implements Plugin<Project> {
     private static final String LIST_TASK_NAME = "listTestClusters";
     private static final String NODE_EXTENSION_NAME = "testClusters";
-    private static final String HELPER_CONFIGURATION_NAME = "testclusters";
+    private static final String HELPER_CONFIGURATION_PREFIX = "testclusters";
     private static final String SYNC_ARTIFACTS_TASK_NAME = "syncTestClustersArtifacts";
     private static final int EXECUTOR_SHUTDOWN_TIMEOUT = 1;
     private static final TimeUnit EXECUTOR_SHUTDOWN_TIMEOUT_UNIT = TimeUnit.MINUTES;
@@ -69,6 +69,10 @@ public class TestClustersPlugin implements Plugin<Project> {
     private final Thread shutdownHook = new Thread(this::shutDownAllClusters);
     private ExecutorService executorService = Executors.newSingleThreadExecutor();

+    public static String getHelperConfigurationName(String version) {
+        return HELPER_CONFIGURATION_PREFIX + "-" + version;
+    }
+
     @Override
     public void apply(Project project) {
         Project rootProject = project.getRootProject();
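For concreteness, a sketch (not part of this commit) of the configuration names this produces. The versions are examples; each distinct version requested by a cluster gets its own configuration on the root project instead of the single shared one removed in the next hunk.

// Illustration only: mirrors getHelperConfigurationName(...) above.
public class HelperConfigurationNameSketch {
    private static final String HELPER_CONFIGURATION_PREFIX = "testclusters";

    static String getHelperConfigurationName(String version) {
        return HELPER_CONFIGURATION_PREFIX + "-" + version;
    }

    public static void main(String[] args) {
        System.out.println(getHelperConfigurationName("7.0.0"));          // testclusters-7.0.0
        System.out.println(getHelperConfigurationName("8.0.0-SNAPSHOT")); // testclusters-8.0.0-SNAPSHOT
    }
}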
@@ -82,47 +86,6 @@ public class TestClustersPlugin implements Plugin<Project> {
         // create DSL for tasks to mark clusters these use
         createUseClusterTaskExtension(project, container);

-        if (rootProject.getConfigurations().findByName(HELPER_CONFIGURATION_NAME) == null) {
-            // We use a single configuration on the root project to resolve all testcluster dependencies ( like distros )
-            // at once, only once without the need to repeat it for each project. This pays off assuming that most
-            // projects use the same dependencies.
-            Configuration helperConfiguration = project.getRootProject().getConfigurations().create(HELPER_CONFIGURATION_NAME);
-            helperConfiguration.setDescription(
-                "Internal helper configuration used by cluster configuration to download " +
-                    "ES distributions and plugins."
-            );
-
-            // We have a single task to sync the helper configuration to "artifacts dir"
-            // the clusters will look for artifacts there based on the naming conventions.
-            // Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in
-            // the build.
-            rootProject.getTasks().create(SYNC_ARTIFACTS_TASK_NAME, sync -> {
-                sync.getInputs().files((Callable<FileCollection>) helperConfiguration::getAsFileTree);
-                sync.getOutputs().dir(new File(project.getRootProject().getBuildDir(), "testclusters/extract"));
-                // NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
-                sync.doLast(new Action<Task>() {
-                    @Override
-                    public void execute(Task task) {
-                        project.sync(spec ->
-                            helperConfiguration.getResolvedConfiguration().getResolvedArtifacts().forEach(resolvedArtifact -> {
-                                final FileTree files;
-                                File file = resolvedArtifact.getFile();
-                                if (file.getName().endsWith(".zip")) {
-                                    files = project.zipTree(file);
-                                } else if (file.getName().endsWith("tar.gz")) {
-                                    files = project.tarTree(file);
-                                } else {
-                                    throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
-                                }
-                                spec.from(files).into(new File(project.getRootProject().getBuildDir(), "testclusters/extract") + "/" +
-                                    resolvedArtifact.getModuleVersion().getId().getGroup()
-                                );
-                            }));
-                    }
-                });
-            });
-        }
-
         // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters
         // that are defined in the build script and the ones that will actually be used in this invocation of gradle
         // we use this information to determine when the last task that required the cluster executed so that we can
@@ -143,6 +106,10 @@ public class TestClustersPlugin implements Plugin<Project> {
         autoConfigureClusterDependencies(project, rootProject, container);
     }

+    private static File getExtractDir(Project project) {
+        return new File(project.getRootProject().getBuildDir(), "testclusters/extract/");
+    }
+
     private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersContainerExtension(Project project) {
         // Create an extensions that allows describing clusters
         NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
@@ -290,12 +257,59 @@ public class TestClustersPlugin implements Plugin<Project> {
         Project rootProject,
         NamedDomainObjectContainer<ElasticsearchCluster> container
     ) {
+        // Download integ test distribution from maven central
+        MavenArtifactRepository mavenCentral = project.getRepositories().mavenCentral();
+        mavenCentral.content(spec -> {
+            spec.includeGroupByRegex("org\\.elasticsearch\\.distribution\\..*");
+        });
+
+        // Other distributions from the download service
+        project.getRepositories().add(
+            project.getRepositories().ivy(spec -> {
+                spec.setUrl("https://artifacts.elastic.co/downloads");
+                spec.patternLayout(p -> p.artifact("elasticsearch/[module]-[revision](-[classifier]).[ext]"));
+                HttpHeaderCredentials headerConfig = spec.getCredentials(HttpHeaderCredentials.class);
+                headerConfig.setName("X-Elastic-No-KPI");
+                headerConfig.setValue("1");
+                spec.content(c -> c.includeGroupByRegex("org\\.elasticsearch\\.distribution\\..*"));
+            })
+        );
+
+        // We have a single task to sync the helper configuration to "artifacts dir"
+        // the clusters will look for artifacts there based on the naming conventions.
+        // Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in
+        // the build.
+        Task sync = Boilerplate.maybeCreate(rootProject.getTasks(), SYNC_ARTIFACTS_TASK_NAME, onCreate -> {
+            onCreate.getOutputs().dir(getExtractDir(rootProject));
+            // NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
+            onCreate.doFirst(new Action<Task>() {
+                @Override
+                public void execute(Task task) {
+                    // Clean up the extract dir first to make sure we have no stale files from older
+                    // previous builds of the same distribution
+                    project.delete(getExtractDir(rootProject));
+                }
+            });
+        });
+
         // When the project evaluated we know of all tasks that use clusters.
         // Each of these have to depend on the artifacts being synced.
         // We need afterEvaluate here despite the fact that container is a domain object, we can't implement this with
         // all because fields can change after the fact.
         project.afterEvaluate(ip -> container.forEach(esCluster ->
             esCluster.eachVersionedDistribution((version, distribution) -> {
+                Configuration helperConfiguration = Boilerplate.maybeCreate(
+                    rootProject.getConfigurations(),
+                    getHelperConfigurationName(version),
+                    onCreate ->
+                        // We use a single configuration on the root project to resolve all testcluster dependencies ( like distros )
+                        // at once, only once without the need to repeat it for each project. This pays off assuming that most
+                        // projects use the same dependencies.
+                        onCreate.setDescription(
+                            "Internal helper configuration used by cluster configuration to download " +
+                                "ES distributions and plugins for " + version
+                        )
+                );
                 BwcVersions.UnreleasedVersionInfo unreleasedInfo;
                 final List<Version> unreleased;
                 {
@@ -320,29 +334,42 @@ public class TestClustersPlugin implements Plugin<Project> {
                     projectNotation.put("path", unreleasedInfo.gradleProjectPath);
                     projectNotation.put("configuration", distribution.getLiveConfiguration());
                     rootProject.getDependencies().add(
-                        HELPER_CONFIGURATION_NAME,
+                        helperConfiguration.getName(),
                         project.getDependencies().project(projectNotation)
                     );
                 } else {
-                    if (distribution.equals(Distribution.INTEG_TEST)) {
-                        rootProject.getDependencies().add(
-                            HELPER_CONFIGURATION_NAME, "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + version
-                        );
-                    } else {
-                        // declare dependencies to be downloaded from the download service.
-                        // The BuildPlugin sets up the right repo for this to work
-                        // TODO: move the repo definition in this plugin when ClusterFormationTasks is removed
-                        String dependency = String.format(
-                            "%s:%s:%s:%s@%s",
-                            distribution.getGroup(),
-                            distribution.getArtifactName(),
-                            version,
-                            distribution.getClassifier(),
-                            distribution.getFileExtension()
-                        );
-                        rootProject.getDependencies().add(HELPER_CONFIGURATION_NAME, dependency);
-                    }
+                    rootProject.getDependencies().add(
+                        helperConfiguration.getName(),
+                        distribution.getGroup() + ":" +
+                            distribution.getArtifactName() + ":" +
+                            version +
+                            (distribution.getClassifier().isEmpty() ? "" : ":" + distribution.getClassifier()) + "@" +
+                            distribution.getFileExtension());
                 }
+                sync.getInputs().files(helperConfiguration);
+                // NOTE: Gradle doesn't allow a lambda here ( fails at runtime )
+                sync.doLast(new Action<Task>() {
+                    @Override
+                    public void execute(Task task) {
+                        project.copy(spec ->
+                            helperConfiguration.getResolvedConfiguration().getResolvedArtifacts().forEach(resolvedArtifact -> {
+                                final FileTree files;
+                                File file = resolvedArtifact.getFile();
+                                if (file.getName().endsWith(".zip")) {
+                                    files = project.zipTree(file);
+                                } else if (file.getName().endsWith("tar.gz")) {
+                                    files = project.tarTree(file);
+                                } else {
+                                    throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
+                                }
+                                spec.from(files, s -> s.into(resolvedArtifact.getModuleVersion().getId().getGroup()));
+                                spec.into(getExtractDir(project));
+                            }));
+                    }
+                });
             })));
     }
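To make the new resolution path concrete, a sketch (not part of this commit) of the dependency notation the plugin adds to the per-version configuration for a released distribution. The group, artifact name, version, classifier, and extension values below are illustrative stand-ins for what Distribution and the cluster definition would supply.

// Illustration only: builds the same "group:name:version[:classifier]@ext" string the plugin
// passes to rootProject.getDependencies().add(...) in the hunk above.
public class DistributionNotationSketch {
    public static void main(String[] args) {
        String group = "org.elasticsearch.distribution.oss"; // e.g. Distribution.OSS.getGroup()
        String artifactName = "elasticsearch-oss";           // e.g. Distribution.OSS.getArtifactName()
        String version = "7.0.0";                            // example released version
        String classifier = "";                              // assumed empty for this example
        String fileExtension = "zip";                        // assumed; comes from getFileExtension()

        String notation = group + ":" +
            artifactName + ":" +
            version +
            (classifier.isEmpty() ? "" : ":" + classifier) + "@" +
            fileExtension;

        // org.elasticsearch.distribution.oss:elasticsearch-oss:7.0.0@zip
        System.out.println(notation);
    }
}

Against the ivy repository declared above (base URL https://artifacts.elastic.co/downloads, pattern elasticsearch/[module]-[revision](-[classifier]).[ext]), such a notation resolves to a URL of the form https://artifacts.elastic.co/downloads/elasticsearch/<module>-<revision>(-<classifier>).<ext>; the exact file name and extension depend on the version and distribution. The X-Elastic-No-KPI header set via HttpHeaderCredentials appears intended to keep these build-time downloads out of Elastic's download metrics.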

Boilerplate.java (org.elasticsearch.gradle.tool)

@@ -18,14 +18,33 @@
  */
 package org.elasticsearch.gradle.tool;

+import org.gradle.api.Action;
+import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Project;
 import org.gradle.api.plugins.JavaPluginConvention;
 import org.gradle.api.tasks.SourceSetContainer;

+import java.util.Optional;
+
 public abstract class Boilerplate {

     public static SourceSetContainer getJavaSourceSets(Project project) {
         return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
     }

+    public static <T> T maybeCreate(NamedDomainObjectContainer<T> collection, String name) {
+        return Optional.ofNullable(collection.findByName(name))
+            .orElse(collection.create(name));
+    }
+
+    public static <T> T maybeCreate(NamedDomainObjectContainer<T> collection, String name, Action<T> action) {
+        return Optional.ofNullable(collection.findByName(name))
+            .orElseGet(() -> {
+                T result = collection.create(name);
+                action.execute(result);
+                return result;
+            });
+    }
+
 }
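One behavioral detail worth noting (an observation, not part of this commit): Optional.orElse(...) always evaluates its argument, so the two-argument maybeCreate still calls collection.create(name) even when the element already exists, while the three-argument overload used by TestClustersPlugin defers creation through orElseGet. A standalone sketch of the difference, using a plain Optional so it runs without Gradle:

import java.util.Optional;

// Illustration only: orElse evaluates eagerly, orElseGet invokes the supplier only when empty.
public class OrElseVsOrElseGet {
    static String create(String name) {
        System.out.println("create(" + name + ") called");
        return name;
    }

    public static void main(String[] args) {
        Optional<String> existing = Optional.of("already-there");

        existing.orElse(create("eager"));         // prints "create(eager) called" even though a value is present
        existing.orElseGet(() -> create("lazy")); // prints nothing; the supplier never runs
    }
}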

TestClustersPluginIT.java (org.elasticsearch.gradle.testclusters)

@@ -103,6 +103,14 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase {
         );
     }

+    public void testReleased() {
+        BuildResult result = getTestClustersRunner("testReleased").build();
+        assertTaskSuccessful(result, ":testReleased");
+        assertStartedAndStoppedOnce(result, "releasedVersionDefault-1");
+        assertStartedAndStoppedOnce(result, "releasedVersionOSS-1");
+        assertStartedAndStoppedOnce(result, "releasedVersionIntegTest-1");
+    }
+
     public void testIncremental() {
         BuildResult result = getTestClustersRunner("clean", ":user1").build();
         assertTaskSuccessful(result, ":user1");

build.gradle (testclusters test project used by TestClustersPluginIT)

@@ -17,8 +17,6 @@ allprojects { all ->
                 url "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision
             }
         }
-
-        jcenter()
     }

     if (project == rootProject || project.name == "alpha" || project.name == "bravo") {
@@ -58,6 +56,21 @@ testClusters {
         javaHome = file(System.getProperty('java.home'))
         numberOfNodes = 3
     }
+    releasedVersionDefault {
+        version = "7.0.0"
+        distribution = 'DEFAULT'
+        javaHome = file(System.getProperty('java.home'))
+    }
+    releasedVersionOSS {
+        version = "7.0.0"
+        distribution = 'OSS'
+        javaHome = file(System.getProperty('java.home'))
+    }
+    releasedVersionIntegTest {
+        version = "7.0.0"
+        distribution = 'INTEG_TEST'
+        javaHome = file(System.getProperty('java.home'))
+    }
 }

 task multiNode {
@@ -67,6 +80,17 @@ task multiNode {
     }
 }

+task testReleased {
+    useCluster testClusters.releasedVersionDefault
+    useCluster testClusters.releasedVersionOSS
+    useCluster testClusters.releasedVersionIntegTest
+    doFirst {
+        println "$path: Cluster running @ ${testClusters.releasedVersionDefault.httpSocketURI}"
+        println "$path: Cluster running @ ${testClusters.releasedVersionOSS.httpSocketURI}"
+        println "$path: Cluster running @ ${testClusters.releasedVersionIntegTest.httpSocketURI}"
+    }
+}
+
 task printLog {
     useCluster testClusters.myTestCluster
     doFirst {