convert modules to use testclusters (#40804)

* convert modules to use testclusters
* Eliminate PluginPropertiesTask and move its logic into the plugin where it belongs
Alpar Torok 2019-04-04 11:41:38 +03:00
parent 6ac307d468
commit 25944c4317
14 changed files with 203 additions and 150 deletions
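
The same pattern repeats across every module build script below: the legacy integTestCluster block is replaced by the testClusters.integTest container, and inter-module dependencies are wired as the zip produced by the dependency's bundlePlugin task rather than as a bare project reference. A minimal before/after sketch of that migration, using lang-painless as the module (the same shape appears verbatim in the scripts further down):

// before: legacy cluster formation DSL
integTestCluster {
    module project(':modules:lang-painless')
}

// after: testclusters DSL, wiring the bundled plugin zip directly
testClusters.integTest {
    module file(project(':modules:lang-painless').tasks.bundlePlugin.archiveFile)
}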


@@ -22,13 +22,18 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import nebula.plugin.publishing.maven.MavenScmPlugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.publish.maven.MavenPublication
import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
import org.gradle.api.publish.maven.tasks.GenerateMavenPom
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.bundling.Zip
import org.gradle.jvm.tasks.Jar
@@ -38,11 +43,15 @@ import java.util.regex.Pattern
/**
* Encapsulates build configuration for an Elasticsearch plugin.
*/
public class PluginBuildPlugin extends BuildPlugin {
class PluginBuildPlugin extends BuildPlugin {
public static final String PLUGIN_EXTENSION_NAME = 'esplugin'
@Override
public void apply(Project project) {
void apply(Project project) {
super.apply(project)
PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project)
configureDependencies(project)
// this afterEvaluate must happen before the afterEvaluate added by integTest creation,
@@ -50,13 +59,13 @@ public class PluginBuildPlugin extends BuildPlugin {
project.afterEvaluate {
boolean isXPackModule = project.path.startsWith(':x-pack:plugin')
boolean isModule = project.path.startsWith(':modules:') || isXPackModule
String name = project.pluginProperties.extension.name
String name = extension.name
project.archivesBaseName = name
// set the project description so it will be picked up by publishing
project.description = project.pluginProperties.extension.description
project.description = extension.description
configurePublishing(project)
configurePublishing(project, extension)
if (project.plugins.hasPlugin(TestClustersPlugin.class) == false) {
project.integTestCluster.dependsOn(project.tasks.bundlePlugin)
@@ -68,12 +77,23 @@ public class PluginBuildPlugin extends BuildPlugin {
} else {
project.tasks.integTest.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
throw new RuntimeException("Testclusters does not support modules yet");
project.testClusters.integTest.module(
project.file(project.tasks.bundlePlugin.archiveFile)
)
} else {
project.testClusters.integTest.plugin(
project.file(project.tasks.bundlePlugin.archiveFile)
)
}
project.extensions.getByType(PluginPropertiesExtension).extendedPlugins.each { pluginName ->
// Auto add dependent modules to the test cluster
if (project.findProject(":modules:${pluginName}") != null) {
project.testClusters.integTest.module(
project.file(project.project(":modules:${pluginName}").tasks.bundlePlugin.archiveFile)
)
}
}
}
project.tasks.run.dependsOn(project.tasks.bundlePlugin)
@@ -87,7 +107,7 @@ public class PluginBuildPlugin extends BuildPlugin {
}
if (isModule == false || isXPackModule) {
addNoticeGeneration(project)
addNoticeGeneration(project, extension)
}
}
project.testingConventions {
@@ -104,14 +124,14 @@ public class PluginBuildPlugin extends BuildPlugin {
}
}
createIntegTestTask(project)
createBundleTask(project)
createBundleTasks(project, extension)
project.configurations.getByName('default').extendsFrom(project.configurations.getByName('runtime'))
project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
}
private void configurePublishing(Project project) {
private void configurePublishing(Project project, PluginPropertiesExtension extension) {
// Only configure publishing if applied externally
if (project.pluginProperties.extension.hasClientJar) {
if (extension.hasClientJar) {
project.plugins.apply(MavenScmPlugin.class)
// Only change Jar tasks, we don't want a -client zip so we can't change archivesBaseName
project.tasks.withType(Jar) {
@@ -119,17 +139,13 @@ public class PluginBuildPlugin extends BuildPlugin {
}
// always configure publishing for client jars
project.plugins.apply(MavenScmPlugin.class)
project.publishing.publications.nebula(MavenPublication).artifactId(
project.pluginProperties.extension.name + "-client"
)
project.publishing.publications.nebula(MavenPublication).artifactId(extension.name + "-client")
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.versions.elasticsearch}.pom"
}
} else {
if (project.plugins.hasPlugin(MavenPublishPlugin)) {
project.publishing.publications.nebula(MavenPublication).artifactId(
project.pluginProperties.extension.name
)
project.publishing.publications.nebula(MavenPublication).artifactId(extension.name)
}
}
@@ -164,24 +180,64 @@ public class PluginBuildPlugin extends BuildPlugin {
* Adds a bundlePlugin task which builds the zip containing the plugin jars,
* metadata, properties, and packaging files
*/
private static void createBundleTask(Project project) {
private static void createBundleTasks(Project project, PluginPropertiesExtension extension) {
File pluginMetadata = project.file('src/main/plugin-metadata')
File templateFile = new File(project.buildDir, "templates/plugin-descriptor.properties")
// create a task to build the properties file for this plugin
PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class)
// create tasks to build the properties file for this plugin
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
outputs.file(templateFile)
doLast {
InputStream resourceTemplate = PluginBuildPlugin.getResourceAsStream("/${templateFile.name}")
templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
}
}
Copy buildProperties = project.tasks.create('pluginProperties', Copy) {
dependsOn(copyPluginPropertiesTemplate)
from(templateFile)
into("${project.buildDir}/generated-resources")
}
project.afterEvaluate {
// check required properties are set
if (extension.name == null) {
throw new InvalidUserDataException('name is a required setting for esplugin')
}
if (extension.description == null) {
throw new InvalidUserDataException('description is a required setting for esplugin')
}
if (extension.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}
Map<String, String> properties = [
'name': extension.name,
'description': extension.description,
'version': extension.version,
'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
'javaVersion': project.targetCompatibility as String,
'classname': extension.classname,
'extendedPlugins': extension.extendedPlugins.join(','),
'hasNativeController': extension.hasNativeController,
'requiresKeystore': extension.requiresKeystore
]
buildProperties.configure {
expand(properties)
inputs.properties(properties)
}
}
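A note on the Copy task configuration above: expand() runs the copied plugin-descriptor.properties through Groovy's template engine, substituting each placeholder from the map, and registering the same map with inputs.properties makes the task re-run whenever an extension value changes. Deferring both into afterEvaluate ensures the required-property checks and the substitution map see the final esplugin values set by the build script.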
// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
testSourceSet.output.dir(buildProperties.descriptorOutput.parentFile, builtBy: 'pluginProperties')
testSourceSet.output.dir(buildProperties.destinationDir, builtBy: buildProperties)
testSourceSet.resources.srcDir(pluginMetadata)
// create the actual bundle task, which zips up all the files for the plugin
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
from(buildProperties.descriptorOutput.parentFile) {
// plugin properties file
include(buildProperties.descriptorOutput.name)
}
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip) {
from buildProperties
from pluginMetadata // metadata (eg custom security policy)
/*
* If the plugin is using the shadow plugin then we need to bundle
@@ -223,23 +279,17 @@ public class PluginBuildPlugin extends BuildPlugin {
}
}
/** Adds nebula publishing task to generate a pom file for the plugin. */
protected static void addClientJarPomGeneration(Project project) {
project.plugins.apply(MavenScmPlugin.class)
project.description = project.pluginProperties.extension.description
}
/** Configure the pom for the main jar of this plugin */
protected void addNoticeGeneration(Project project) {
File licenseFile = project.pluginProperties.extension.licenseFile
protected void addNoticeGeneration(Project project, PluginPropertiesExtension extension) {
File licenseFile = extension.licenseFile
if (licenseFile != null) {
project.tasks.bundlePlugin.from(licenseFile.parentFile) {
include(licenseFile.name)
rename { 'LICENSE.txt' }
}
}
File noticeFile = project.pluginProperties.extension.noticeFile
File noticeFile = extension.noticeFile
if (noticeFile != null) {
NoticeTask generateNotice = project.tasks.create('generateNotice', NoticeTask.class)
generateNotice.inputFile = noticeFile


@@ -1,82 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.OutputFile
/**
* Creates a plugin descriptor.
*/
class PluginPropertiesTask extends Copy {
PluginPropertiesExtension extension
@OutputFile
File descriptorOutput = new File(project.buildDir, 'generated-resources/plugin-descriptor.properties')
PluginPropertiesTask() {
File templateFile = new File(project.buildDir, "templates/${descriptorOutput.name}")
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
doLast {
InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream("/${descriptorOutput.name}")
templateFile.parentFile.mkdirs()
templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
}
}
dependsOn(copyPluginPropertiesTemplate)
extension = project.extensions.create('esplugin', PluginPropertiesExtension, project)
project.afterEvaluate {
// check required properties are set
if (extension.name == null) {
throw new InvalidUserDataException('name is a required setting for esplugin')
}
if (extension.description == null) {
throw new InvalidUserDataException('description is a required setting for esplugin')
}
if (extension.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}
// configure property substitution
from(templateFile.parentFile).include(descriptorOutput.name)
into(descriptorOutput.parentFile)
Map<String, String> properties = generateSubstitutions()
expand(properties)
inputs.properties(properties)
}
}
Map<String, String> generateSubstitutions() {
return [
'name': extension.name,
'description': extension.description,
'version': extension.version,
'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
'javaVersion': project.targetCompatibility as String,
'classname': extension.classname,
'extendedPlugins': extension.extendedPlugins.join(','),
'hasNativeController': extension.hasNativeController,
'requiresKeystore': extension.requiresKeystore
]
}
}


@@ -128,6 +128,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.plugin(plugin));
}
@Override
public void module(File module) {
nodes.all(each -> each.module(module));
}
@Override
public void keystore(String key, String value) {
nodes.all(each -> each.keystore(key, value));
@@ -198,6 +203,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
}
}
@Override
public void extraConfigFile(String destination, File from) {
nodes.all(node -> node.extraConfigFile(destination, from));
}
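Both new cluster-level methods fan out to every node through nodes.all, so a single module(...) or extraConfigFile(...) call in a build script configures each node in the cluster, including nodes added later.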
private void writeUnicastHostsFiles() {
String unicastUris = nodes.stream().flatMap(node -> node.getAllTransportPortURI().stream()).collect(Collectors.joining("\n"));
nodes.forEach(node -> {


@@ -34,6 +34,7 @@ import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -61,6 +62,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private static final TimeUnit ES_DESTROY_TIMEOUT_UNIT = TimeUnit.SECONDS;
private static final int NODE_UP_TIMEOUT = 60;
private static final TimeUnit NODE_UP_TIMEOUT_UNIT = TimeUnit.SECONDS;
private static final List<String> OVERRIDABLE_SETTINGS = Arrays.asList(
"path.repo"
);
private final String path;
private final String name;
@@ -72,10 +76,12 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final List<URI> plugins = new ArrayList<>();
private final List<File> modules = new ArrayList<>();
private final Map<String, Supplier<CharSequence>> settings = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> keystoreSettings = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> systemProperties = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> environment = new LinkedHashMap<>();
private final Map<String, File> extraConfigFiles = new HashMap<>();
final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
private final Path confPathRepo;
@@ -150,6 +156,11 @@ public class ElasticsearchNode implements TestClusterConfiguration {
plugin(plugin.toURI());
}
@Override
public void module(File module) {
this.modules.add(module);
}
@Override
public void keystore(String key, String value) {
addSupplier("Keystore", keystoreSettings, key, value);
@@ -278,9 +289,66 @@ public class ElasticsearchNode implements TestClusterConfiguration {
});
}
installModules();
copyExtraConfigFiles();
startElasticsearchProcess();
}
private void copyExtraConfigFiles() {
extraConfigFiles.forEach((destination, from) -> {
if (Files.exists(from.toPath()) == false) {
throw new TestClustersException("Can't create extra config file from " + from + " for " + this +
" as it does not exist");
}
Path dst = configFile.getParent().resolve(destination);
try {
Files.createDirectories(dst.getParent());
Files.copy(from.toPath(), dst, StandardCopyOption.REPLACE_EXISTING);
LOGGER.info("Added extra config file {} for {}", destination, this);
} catch (IOException e) {
throw new UncheckedIOException("Can't create extra config file " + destination + " for " + this, e);
}
});
}
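Destinations resolve against the node's config directory (configFile.getParent()), which is why a build script can pass a relative path like 'ingest-user-agent/test-regexes.yml' (see the ingest-user-agent script below) and have the file land next to elasticsearch.yml.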
private void installModules() {
if (distribution == Distribution.INTEG_TEST) {
modules.forEach(module -> services.copy(spec -> {
if (module.getName().toLowerCase().endsWith(".zip")) {
spec.from(services.zipTree(module));
} else if (module.isDirectory()) {
spec.from(module);
} else {
throw new IllegalArgumentException("Not a valid module " + module + " for " + this);
}
spec.into(
workingDir
.resolve("modules")
.resolve(
module.getName()
.replace(".zip", "")
.replace("-" + version, "")
)
.toFile()
);
}));
} else {
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distribution + " distribution already " +
"has them");
}
}
@Override
public void extraConfigFile(String destination, File from) {
if (destination.contains("..")) {
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination +
" for " + this);
}
extraConfigFiles.put(destination, from);
}
private void runElasticsearchBinScriptWithInput(String input, String tool, String... args) {
try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
services.loggedExec(spec -> {
@@ -560,11 +628,16 @@ public class ElasticsearchNode implements TestClusterConfiguration {
.collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue().get().toString()));
HashSet<String> overriden = new HashSet<>(defaultConfig.keySet());
overriden.retainAll(userConfig.keySet());
overriden.removeAll(OVERRIDABLE_SETTINGS);
if (overriden.isEmpty() ==false) {
throw new IllegalArgumentException(
"Testclusters does not allow the following settings to be changed:" + overriden + " for " + this
);
}
// Drop overridable settings from the defaults so the user-supplied value wins and no duplicate keys are written
userConfig.keySet().stream()
.filter(OVERRIDABLE_SETTINGS::contains)
.forEach(defaultConfig::remove);
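The net effect: a user-supplied setting that collides with a generated default fails the build, except for whitelisted keys such as path.repo, which are instead dropped from the defaults so the user value is written. That is what lets the repository-url script at the bottom of this change set path.repo directly.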
try {
// We create hard links for the distribution, so we need to remove the config file before writing it


@@ -41,6 +41,8 @@ public interface TestClusterConfiguration {
void plugin(File plugin);
void module(File module);
void keystore(String key, String value);
void keystore(String key, Supplier<CharSequence> valueSupplier);
@@ -63,6 +65,8 @@ public interface TestClusterConfiguration {
void start();
void extraConfigFile(String destination, File from);
String getHttpSocketURI();
String getTransportPortURI();


@@ -26,7 +26,3 @@ esplugin {
dependencies {
compileOnly project(':modules:lang-painless')
}
integTestCluster {
module project(':modules:lang-painless')
}


@@ -19,6 +19,7 @@
configure(subprojects.findAll { it.parent.path == project.path }) {
group = 'org.elasticsearch.plugin' // for modules which publish client jars
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin'
esplugin {


@@ -28,7 +28,3 @@ dependencies {
compile project(':libs:grok')
compile project(':libs:dissect')
}
integTestCluster {
module project(':modules:lang-painless')
}


@@ -22,6 +22,6 @@ esplugin {
classname 'org.elasticsearch.ingest.useragent.IngestUserAgentPlugin'
}
integTestCluster {
extraConfigFile 'ingest-user-agent/test-regexes.yml', 'src/test/test-regexes.yml'
testClusters.integTest {
extraConfigFile 'ingest-user-agent/test-regexes.yml', file('src/test/test-regexes.yml')
}


@@ -22,8 +22,8 @@ esplugin {
classname 'org.elasticsearch.painless.PainlessPlugin'
}
integTestCluster {
module project.project(':modules:mapper-extras')
testClusters.integTest {
module file(project(':modules:mapper-extras').tasks.bundlePlugin.archiveFile)
systemProperty 'es.scripting.update.ctx_in_params', 'false'
}


@@ -26,7 +26,3 @@ esplugin {
dependencies {
compileOnly project(':modules:lang-painless')
}
integTestCluster {
module project(':modules:lang-painless')
}


@@ -23,9 +23,9 @@ esplugin {
hasClientJar = true
}
integTestCluster {
testClusters.integTest {
// Modules whose integration is explicitly tested in integration tests
module project(':modules:lang-mustache')
module file(project(':modules:lang-mustache').tasks.bundlePlugin.archiveFile)
}
run {


@@ -29,10 +29,10 @@ esplugin {
hasClientJar = true
}
integTestCluster {
testClusters.integTest {
// Modules whose integration is explicitly tested in integration tests
module project(':modules:parent-join')
module project(':modules:lang-painless')
module file(project(':modules:parent-join').tasks.bundlePlugin.archiveFile)
module file(project(':modules:lang-painless').tasks.bundlePlugin.archiveFile)
// Whitelist reindexing from the local node so we can test reindex-from-remote.
setting 'reindex.remote.whitelist', '127.0.0.1:*'
}
@@ -97,12 +97,15 @@ dependencies {
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
logger.warn("Disabling reindex-from-old tests because we can't get the pid file on windows")
integTestRunner.systemProperty "tests.fromOld", "false"
integTest.runner {
systemProperty "tests.fromOld", "false"
}
} else if (rootProject.rootDir.toString().contains(" ")) {
logger.warn("Disabling reindex-from-old tests because Elasticsearch 1.7 won't start with spaces in the path")
integTestRunner.systemProperty "tests.fromOld", "false"
integTest.runner {
systemProperty "tests.fromOld", "false"
}
} else {
integTestRunner.systemProperty "tests.fromOld", "true"
/* Set up tasks to unzip and run the old versions of ES before running the
* integration tests. */
for (String version : ['2', '1', '090']) {
@@ -116,8 +119,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
}
into temporaryDir
}
Task fixture = task("oldEs${version}Fixture",
type: org.elasticsearch.gradle.test.AntFixture) {
Task fixture = task("oldEs${version}Fixture", type: org.elasticsearch.gradle.test.AntFixture) {
dependsOn project.configurations.oldesFixture
dependsOn unzip
executable = new File(project.runtimeJavaHome, 'bin/java')
@@ -133,12 +136,16 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
return fixture.portsFile.exists()
}
}
integTest.dependsOn fixture
integTestRunner {
/* Use a closure on the string to delay evaluation until right before we
* run the integration tests so that we can be sure that the file is
* ready. */
systemProperty "es${version}.port", "${ -> fixture.addressAndPort }"
integTest {
dependsOn fixture
runner {
systemProperty "tests.fromOld", "true"
/* Use a closure on the string to delay evaluation until right before we
* run the integration tests so that we can be sure that the file is
* ready. */
systemProperty "es${version}.port", "${ -> fixture.addressAndPort }"
}
}
}
}


@@ -37,10 +37,12 @@ task urlFixture(type: AntFixture) {
args 'org.elasticsearch.repositories.url.URLFixture', baseDir, "${repositoryDir.absolutePath}"
}
integTestCluster {
integTest {
dependsOn urlFixture
}
testClusters.integTest {
// repositoryDir is used by a FS repository to create snapshots
setting 'path.repo', "${repositoryDir.absolutePath}"
// repositoryDir is used by two URL repositories to restore snapshots
setting 'repositories.url.allowed_urls', "http://snapshot.test*,http://${ -> urlFixture.addressAndPort }"
setting 'repositories.url.allowed_urls', { "http://snapshot.test*,http://${urlFixture.addressAndPort}" }
}
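
Note that the allowed_urls value also moves from a lazily-interpolated GString into an explicit closure: testclusters stores settings as suppliers and resolves them only when it writes the node's config file, so the fixture's address is read after urlFixture has started rather than at configuration time.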