Create plugin for internalClusterTest task (#56067)

This commit creates a new Gradle plugin that provides a separate task name
and source set for running ESIntegTestCase tests. The only project
converted to use the new plugin in this PR is server, as an example. The
remaining cases in x-pack will be handled in follow-ups.

backport of #55896
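For a project opting in, the conversion mirrors what this diff does for server: apply the new plugin, move ESIntegTestCase tests to src/internalClusterTest/java, and declare their dependencies on the internalClusterTest* configurations. A minimal build.gradle sketch (illustrative only, modelled on the server changes below):

---------------------------------------------------------------------------
apply plugin: 'elasticsearch.internal-cluster-test'

dependencies {
    // dependencies for the new source set go on the internalClusterTest*
    // configurations, as the server build script below does
    internalClusterTestCompile(project(':test:framework')) {
        exclude group: 'org.elasticsearch', module: 'server'
    }
}
---------------------------------------------------------------------------

The tests are then run with `./gradlew internalClusterTest` (or the shorter `icTest` alias the plugin registers), and `check` depends on the new task automatically.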
Ryan Ernst 2020-05-06 17:20:52 -07:00 committed by GitHub
parent 980f175222
commit 33d6a55d1d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
359 changed files with 279 additions and 104 deletions

View File

@@ -260,10 +260,10 @@ To run all verification tasks, including static checks, unit tests, and integrat
---------------------------------------------------------------------------
Note that this will also run the unit tests and precommit tasks first. If you want to just
run the integration tests (because you are debugging them):
run the in memory cluster integration tests (because you are debugging them):
---------------------------------------------------------------------------
./gradlew integTest
./gradlew internalClusterTest
---------------------------------------------------------------------------
If you want to just run the precommit checks:

View File

@@ -165,8 +165,7 @@ class PrecommitTasks {
}
SourceSet sourceSet = project.sourceSets.getByName(sourceSetName)
FileCollection runtime = sourceSet.runtimeClasspath
classpath = runtime.plus(sourceSet.compileClasspath)
classpath = project.files { sourceSet.runtimeClasspath.plus(sourceSet.compileClasspath) }
targetCompatibility = BuildParams.runtimeJavaVersion.majorVersion
if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_13) {
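The PrecommitTasks change above wraps the classpath in `project.files { ... }`. `Project.files` accepts a closure that is only evaluated when the collection is resolved, so the classpath keeps tracking the source set even if its runtime classpath is later replaced (as the new `addTestSourceSet` helper in this commit does via `setRuntimeClasspath`). A minimal sketch of the difference, assuming an ordinary `test` source set:

---------------------------------------------------------------------------
// eager: captures references to the FileCollection objects that exist right
// now; if the source set's classpath is later replaced, the stale collection
// is still used
def eagerClasspath = sourceSets.test.runtimeClasspath
        .plus(sourceSets.test.compileClasspath)

// lazy: the closure runs only when the files are resolved, so it always reads
// the source set's current classpath
def lazyClasspath = project.files {
    sourceSets.test.runtimeClasspath.plus(sourceSets.test.compileClasspath)
}
---------------------------------------------------------------------------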

View File

@@ -20,6 +20,7 @@ package org.elasticsearch.gradle.precommit;
import groovy.lang.Closure;
import org.elasticsearch.gradle.util.GradleUtils;
import org.elasticsearch.gradle.util.Util;
import org.gradle.api.DefaultTask;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Task;
@@ -46,9 +47,12 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@@ -62,6 +66,8 @@ public class TestingConventionsTasks extends DefaultTask {
private final NamedDomainObjectContainer<TestingConventionRule> naming;
private List<String> tasks = null;
public TestingConventionsTasks() {
setDescription("Tests various testing conventions");
// Run only after everything is compiled
@@ -74,6 +80,7 @@ public class TestingConventionsTasks extends DefaultTask {
return getProject().getTasks()
.withType(Test.class)
.stream()
.filter(t -> tasks == null || tasks.contains(t.getName()))
.filter(Task::getEnabled)
.collect(Collectors.toMap(Task::getPath, task -> task.getCandidateClassFiles().getFiles()));
}
@@ -81,8 +88,8 @@ public class TestingConventionsTasks extends DefaultTask {
@Input
public Map<String, File> getTestClassNames() {
if (testClassNames == null) {
testClassNames = GradleUtils.getJavaSourceSets(getProject())
.getByName("test")
testClassNames = Util.getJavaTestSourceSet(getProject())
.get()
.getOutput()
.getClassesDirs()
.getFiles()
@@ -108,6 +115,10 @@ public class TestingConventionsTasks extends DefaultTask {
naming.configure(action);
}
public void setTasks(String... tasks) {
this.tasks = Arrays.asList(tasks);
}
@Input
public Set<String> getMainClassNamedLikeTests() {
SourceSetContainer javaSourceSets = GradleUtils.getJavaSourceSets(getProject());
@@ -319,6 +330,7 @@ public class TestingConventionsTasks extends DefaultTask {
}
private boolean implementsNamingConvention(Class<?> clazz) {
Objects.requireNonNull(clazz);
return implementsNamingConvention(clazz.getName());
}
@@ -349,13 +361,7 @@ public class TestingConventionsTasks extends DefaultTask {
// the classes these don't influence the checks done by this task.
// A side effect is that we could mark as up-to-date with missing dependencies, but these will be found when
// running the tests.
return getProject().files(
getProject().getConfigurations().getByName("testRuntime").resolve(),
GradleUtils.getJavaSourceSets(getProject())
.stream()
.flatMap(sourceSet -> sourceSet.getOutput().getClassesDirs().getFiles().stream())
.collect(Collectors.toList())
);
return Util.getJavaTestSourceSet(getProject()).get().getRuntimeClasspath();
}
private Map<String, File> walkPathAndLoadClasses(File testRoot) {
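The new `tasks` property restricts which `Test` tasks the conventions check inspects. The server build script later in this diff uses it so that only the plain unit test task is checked, since `*IT` classes now live in the internalClusterTest source set and run from their own task. A minimal usage sketch:

---------------------------------------------------------------------------
testingConventions {
    // limit the conventions check to the unit test task, mirroring the
    // server build.gradle change in this diff
    tasks = ['test']
}
---------------------------------------------------------------------------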

View File

@@ -0,0 +1,41 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test;
import org.elasticsearch.gradle.util.GradleUtils;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.tasks.SourceSet;
public class InternalClusterTestPlugin implements Plugin<Project> {
public static final String SOURCE_SET_NAME = "internalClusterTest";
@Override
public void apply(Project project) {
GradleUtils.addTestSourceSet(project, SOURCE_SET_NAME);
// TODO: fix usages of IT tests depending on Tests methods so this extension is not necessary
GradleUtils.extendSourceSet(project, SourceSet.TEST_SOURCE_SET_NAME, SOURCE_SET_NAME);
// add alias task that is easier to type
project.getTasks().register("icTest").configure(alias -> alias.dependsOn(SOURCE_SET_NAME));
}
}

View File

@@ -18,6 +18,7 @@
*/
package org.elasticsearch.gradle.util;
import org.elasticsearch.gradle.ElasticsearchJavaPlugin;
import org.gradle.api.Action;
import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
@@ -25,16 +26,26 @@ import org.gradle.api.PolymorphicDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.UnknownTaskException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.plugins.JavaBasePlugin;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.provider.Provider;
import org.gradle.api.services.BuildService;
import org.gradle.api.services.BuildServiceRegistration;
import org.gradle.api.services.BuildServiceRegistry;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.testing.Test;
import org.gradle.plugins.ide.eclipse.model.EclipseModel;
import org.gradle.plugins.ide.idea.model.IdeaModel;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
public abstract class GradleUtils {
@@ -120,4 +131,79 @@ public abstract class GradleUtils {
return (Provider<T>) registration.getService();
}
/**
* Add a source set and task of the same name that runs tests.
*
* IDEs are also configured if setup, and the test task is added to check. The new test source
* set extends from the normal test source set to allow sharing of utilities.
*
* @return A task provider for the newly created test task
*/
public static TaskProvider<?> addTestSourceSet(Project project, String sourceSetName) {
project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
// create our test source set and task
SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
SourceSet testSourceSet = sourceSets.create(sourceSetName);
TaskProvider<Test> testTask = project.getTasks().register(sourceSetName, Test.class);
testTask.configure(task -> {
task.setGroup(JavaBasePlugin.VERIFICATION_GROUP);
task.setTestClassesDirs(testSourceSet.getOutput().getClassesDirs());
task.setClasspath(testSourceSet.getRuntimeClasspath());
});
Configuration testCompileConfig = project.getConfigurations().getByName(testSourceSet.getCompileClasspathConfigurationName());
Configuration testRuntimeConfig = project.getConfigurations().getByName(testSourceSet.getRuntimeClasspathConfigurationName());
testSourceSet.setCompileClasspath(testCompileConfig);
testSourceSet.setRuntimeClasspath(project.getObjects().fileCollection().from(testSourceSet.getOutput(), testRuntimeConfig));
extendSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME, sourceSetName);
// setup IDEs
String runtimeClasspathName = testSourceSet.getRuntimeClasspathConfigurationName();
Configuration runtimeClasspathConfiguration = project.getConfigurations().getByName(runtimeClasspathName);
project.getPluginManager().withPlugin("idea", p -> {
IdeaModel idea = project.getExtensions().getByType(IdeaModel.class);
idea.getModule().setTestSourceDirs(testSourceSet.getJava().getSrcDirs());
idea.getModule().getScopes().put("TEST", Map.of("plus", List.of(runtimeClasspathConfiguration)));
});
project.getPluginManager().withPlugin("eclipse", p -> {
EclipseModel eclipse = project.getExtensions().getByType(EclipseModel.class);
eclipse.getClasspath().setSourceSets(List.of(testSourceSet));
eclipse.getClasspath().getPlusConfigurations().add(runtimeClasspathConfiguration);
});
// add to the check task
project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure(check -> check.dependsOn(testTask));
return testTask;
}
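Beyond `InternalClusterTestPlugin`, `addTestSourceSet` could be reused for any extra test flavor. A hedged sketch of calling it directly from a build script (the `sanityTest` name and system property are hypothetical, not part of this commit):

---------------------------------------------------------------------------
import org.elasticsearch.gradle.util.GradleUtils

// hypothetical: creates a 'sanityTest' source set plus a matching Test task
// that `check` depends on
def sanityTest = GradleUtils.addTestSourceSet(project, 'sanityTest')
sanityTest.configure { task ->
    // the provider holds a Test task, so Test-specific settings apply
    task.systemProperty 'tests.sanity', 'true'
}
---------------------------------------------------------------------------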
/**
* Extend the configurations of one source set from another.
*/
public static void extendSourceSet(Project project, String parentSourceSetName, String childSourceSetName) {
final List<Function<SourceSet, String>> configNameFunctions = Arrays.asList(
SourceSet::getCompileConfigurationName,
SourceSet::getImplementationConfigurationName,
SourceSet::getRuntimeConfigurationName,
SourceSet::getRuntimeOnlyConfigurationName
);
SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
SourceSet parent = sourceSets.getByName(parentSourceSetName);
SourceSet child = sourceSets.getByName(childSourceSetName);
for (Function<SourceSet, String> configNameFunction : configNameFunctions) {
String parentConfigName = configNameFunction.apply(parent);
String childConfigName = configNameFunction.apply(child);
Configuration parentConfig = project.getConfigurations().getByName(parentConfigName);
Configuration childConfig = project.getConfigurations().getByName(childConfigName);
childConfig.extendsFrom(parentConfig);
}
// tie this new test source set to the main and test source sets
child.setCompileClasspath(project.getObjects().fileCollection().from(child.getCompileClasspath(), parent.getOutput()));
child.setRuntimeClasspath(project.getObjects().fileCollection().from(child.getRuntimeClasspath(), parent.getOutput()));
}
}
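Because `extendSourceSet` makes each child configuration extend its parent counterpart, a dependency declared for the `test` source set is automatically visible to internalClusterTest compilation and runtime once `InternalClusterTestPlugin` has wired the two together. A minimal sketch (the dependency coordinates are only an illustration):

---------------------------------------------------------------------------
dependencies {
    // hypothetical dependency: internalClusterTestImplementation extends
    // testImplementation via extendSourceSet, so this ends up on the
    // internalClusterTest compile and runtime classpaths as well; no separate
    // internalClusterTest* declaration is needed
    testImplementation 'org.mockito:mockito-core:3.3.3'
}
---------------------------------------------------------------------------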

View File

@@ -0,0 +1,20 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
implementation-class=org.elasticsearch.gradle.test.InternalClusterTestPlugin

View File

@@ -131,7 +131,7 @@ and add it as a dependency. As an example, we will use the `slf4j-simple` logger
</dependency>
--------------------------------------------------
:client-tests: {docdir}/../../server/src/test/java/org/elasticsearch/client/documentation
:client-tests: {docdir}/../../server/src/internalClusterTest/java/org/elasticsearch/client/documentation
:hlrc-tests: {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client
:client-reindex-tests: {docdir}/../../modules/reindex/src/test/java/org/elasticsearch/client/documentation

View File

@@ -22,6 +22,7 @@ import org.elasticsearch.gradle.info.BuildParams
apply plugin: 'elasticsearch.build'
apply plugin: 'nebula.optional-base'
apply plugin: 'nebula.maven-base-publish'
apply plugin: 'elasticsearch.internal-cluster-test'
publishing {
publications {
@@ -130,10 +131,14 @@ dependencies {
// tests use the locally compiled version of server
exclude group: 'org.elasticsearch', module: 'server'
}
internalClusterTestCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'server'
}
}
compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
compileInternalClusterTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
// Until this project is always being formatted with spotless, we need to
// guard against `spotless()` not existing.
@@ -170,6 +175,7 @@ testingConventions {
baseClass "org.elasticsearch.test.ESSingleNodeTestCase"
}
}
tasks = ['test']
}
task generateModulesList {
@@ -328,20 +334,12 @@ dependencyLicenses {
}
task integTest(type: Test) {
description = 'Multi-node tests'
mustRunAfter test
include '**/*IT.class'
tasks.named('internalClusterTest').configure {
if (org.elasticsearch.gradle.info.BuildParams.isSnapshotBuild() == false) {
systemProperty 'es.datastreams_feature_enabled', 'true'
}
}
check.dependsOn integTest
task internalClusterTest {
dependsOn integTest
licenseHeaders {
excludes << 'org/elasticsearch/client/documentation/placeholder.txt'
}

View File

@@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.hasToString;
public class ValidateIndiesAliasesRequestIT extends ESSingleNodeTestCase {
public class ValidateIndicesAliasesRequestIT extends ESSingleNodeTestCase {
public static class IndicesAliasesPlugin extends Plugin implements ActionPlugin {

View File

@@ -45,7 +45,6 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class ClusterShardLimitIT extends ESIntegTestCase {
@@ -110,7 +109,7 @@ public class ClusterShardLimitIT extends ESIntegTestCase {
setShardsPerNode(counts.getShardsPerNode());
if (counts.firstIndexShards > 0) {
if (counts.getFirstIndexShards() > 0) {
createIndex(
"test",
Settings.builder()
@@ -367,68 +366,4 @@
assertEquals(expectedError, e.getMessage());
}
public static class ShardCounts {
private final int shardsPerNode;
private final int firstIndexShards;
private final int firstIndexReplicas;
private final int failingIndexShards;
private final int failingIndexReplicas;
private ShardCounts(int shardsPerNode,
int firstIndexShards,
int firstIndexReplicas,
int failingIndexShards,
int failingIndexReplicas) {
this.shardsPerNode = shardsPerNode;
this.firstIndexShards = firstIndexShards;
this.firstIndexReplicas = firstIndexReplicas;
this.failingIndexShards = failingIndexShards;
this.failingIndexReplicas = failingIndexReplicas;
}
public static ShardCounts forDataNodeCount(int dataNodes) {
assertThat("this method will not work reliably with this many data nodes due to the limit of shards in a single index," +
"use fewer data nodes or multiple indices", dataNodes, lessThanOrEqualTo(90));
int mainIndexReplicas = between(0, dataNodes - 1);
int mainIndexShards = between(1, 10);
int totalShardsInIndex = (mainIndexReplicas + 1) * mainIndexShards;
// Sometimes add some headroom to the limit to check that it works even if you're not already right up against the limit
int shardsPerNode = (int) Math.ceil((double) totalShardsInIndex / dataNodes) + between(0, 10);
int totalCap = shardsPerNode * dataNodes;
int failingIndexShards;
int failingIndexReplicas;
if (dataNodes > 1 && frequently()) {
failingIndexShards = Math.max(1, totalCap - totalShardsInIndex);
failingIndexReplicas = between(1, dataNodes - 1);
} else {
failingIndexShards = totalCap - totalShardsInIndex + between(1, 10);
failingIndexReplicas = 0;
}
return new ShardCounts(shardsPerNode, mainIndexShards, mainIndexReplicas, failingIndexShards, failingIndexReplicas);
}
public int getShardsPerNode() {
return shardsPerNode;
}
public int getFirstIndexShards() {
return firstIndexShards;
}
public int getFirstIndexReplicas() {
return firstIndexReplicas;
}
public int getFailingIndexShards() {
return failingIndexShards;
}
public int getFailingIndexReplicas() {
return failingIndexReplicas;
}
}
}

Some files were not shown because too many files have changed in this diff.