Nik Everett e6b9f59e4e
Build: Shadow x-pack:protocol into x-pack:plugin:core (#32240)
This bundles the x-pack:protocol project into the x-pack:plugin:core
project because we'd like folks to consider it an implementation detail
of our build rather than a separate artifact to be managed and depended
on. It is now bundled into both x-pack:plugin:core and
client:rest-high-level. To make this work I had to fix a few things.

Firstly, I had to make PluginBuildPlugin work with the shadow plugin.
In that case we have to bundle only the `shadow` dependencies and the
shadow jar.
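
A rough sketch of what that means in PluginBuildPlugin terms (hypothetical
task wiring, not the exact buildSrc code):

    // hedged sketch only: when the shadow plugin is applied, bundle the
    // shadow jar plus only the `shadow` configuration, instead of the plain
    // jar and its whole runtime classpath
    project.plugins.withId('com.github.johnrengelman.shadow') {
      project.tasks.getByName('bundlePlugin') {
        from project.tasks.getByName('shadowJar')
        from project.configurations.shadow
      }
    }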

Secondly, every reference to x-pack:plugin:core has to use the `shadow`
configuration. Without that the reference is missing all of the
un-shadowed dependencies. I tried to make it so that applying the shadow
plugin automatically redefines the `default` configuration to mirror the
`shadow` configuration, which would allow us to use bare project references
to the x-pack:plugin:core project, but I couldn't make it work. It'd *look*
like it works but then fail for transitive dependencies anyway. I think
it is still a good thing to do but I don't have the willpower to do it
now.
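
Concretely, a dependency on x-pack:plugin:core now has to name the `shadow`
configuration explicitly, exactly as this file does below:

    dependencies {
      // the shadow configuration carries the shadowed core jar (with the
      // bundled protocol classes) plus the dependencies that were not shadowed
      testCompile project(path: xpackModule('core'), configuration: 'shadow')
    }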

Finally, I had to fix an issue where Eclipse and IntelliJ didn't properly
reference shadowed transitive dependencies. Neither IDE supports shadowing
natively so they have to reference the shadowed projects. We fix this by
detecting `shadow` dependencies when in "IntelliJ mode" or "Eclipse mode"
and adding `runtime` dependencies to the same target. This convinces
IntelliJ and Eclipse to play nice.
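
The effect is roughly the following (illustrative only; `isEclipse` and
`isIdea` stand in for however the build detects "Eclipse mode" and "IntelliJ
mode"): each `shadow` project reference gets a plain `runtime` counterpart so
the IDE can resolve the classes that would otherwise only exist inside the
shadow jar.

    // hedged sketch, not the exact buildSrc code
    if (isEclipse || isIdea) {
      dependencies {
        testRuntime project(path: xpackModule('core'), configuration: 'runtime')
      }
    }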
2018-07-24 11:53:04 -04:00

import org.elasticsearch.gradle.test.NodeInfo
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.Version
import java.nio.charset.StandardCharsets
import java.util.regex.Matcher
// Apply the java plugin to this project so the sources can be edited in an IDE
apply plugin: 'elasticsearch.build'
test.enabled = false
dependencies {
  testCompile project(path: xpackModule('core'), configuration: 'shadow')
  testCompile project(path: xpackModule('security'), configuration: 'runtime')
  testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // to be moved in a later commit
}
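// Custom wait condition for the test clusters: the cluster is secured, so the health check
// below authenticates as test_user and keeps retrying while security (and the other nodes)
// are still coming up.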
Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
  File tmpFile = new File(node.cwd, 'wait.success')
  // wait up to two minutes
  final long stopTime = System.currentTimeMillis() + (2 * 60000L);
  Exception lastException = null;
  int lastResponseCode = 0
  while (System.currentTimeMillis() < stopTime) {
    lastException = null;
    // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
    HttpURLConnection httpURLConnection = null;
    try {
      // TODO this sucks having to hardcode number of nodes, but node.config.numNodes isn't necessarily accurate for rolling
      httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=3&wait_for_status=yellow").openConnection();
      httpURLConnection.setRequestProperty("Authorization", "Basic " +
        Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
      httpURLConnection.setRequestMethod("GET");
      httpURLConnection.setConnectTimeout(1000);
      httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes!
      httpURLConnection.connect();
      lastResponseCode = httpURLConnection.getResponseCode()
      if (lastResponseCode == 200) {
        tmpFile.withWriter StandardCharsets.UTF_8.name(), {
          it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name()))
        }
        break;
      }
    } catch (Exception e) {
      logger.debug("failed to call cluster health", e)
      lastException = e
    } finally {
      if (httpURLConnection != null) {
        httpURLConnection.disconnect();
      }
    }
    // did not start, so wait a bit before trying again
    Thread.sleep(500L);
  }
  if (tmpFile.exists() == false) {
    final String message = "final attempt of calling cluster health failed [lastResponseCode=${lastResponseCode}]"
    if (lastException != null) {
      logger.error(message, lastException)
    } else {
      logger.error(message + " [no exception]")
    }
  }
  return tmpFile.exists()
}
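// Keep a handle on the top level project; the subprojects below reuse its test sources,
// resources, and system key file.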
Project mainProject = project
compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"
/**
 * Subdirectories of this project test rolling upgrades with various
 * configuration options based on their name.
 */
subprojects {
  Matcher m = project.name =~ /with(out)?-system-key/
  if (false == m.matches()) {
    throw new InvalidUserDataException("Invalid project name [${project.name}]")
  }
  boolean withSystemKey = m.group(1) == null
  apply plugin: 'elasticsearch.standalone-test'
  // Use resources from the rolling-upgrade project in subdirectories
  sourceSets {
    test {
      java {
        srcDirs = ["${mainProject.projectDir}/src/test/java"]
      }
      resources {
        srcDirs = ["${mainProject.projectDir}/src/test/resources"]
      }
    }
  }
  String outputDir = "${buildDir}/generated-resources/${project.name}"
  // This is a top level task which we will add dependencies to below.
  // It is a single task that can be used to run backcompat tests against all versions.
  task bwcTest {
    description = 'Runs backwards compatibility tests.'
    group = 'verification'
  }
  String output = "${buildDir}/generated-resources/${project.name}"
  task copyTestNodeKeystore(type: Copy) {
    from project(':x-pack:plugin:core')
        .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
    into outputDir
  }
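  // For each wire compatible version: start a 3 node cluster on the old version, then upgrade
  // it one node at a time, running the REST suites (old, mixed, upgraded) at each stage.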
  for (Version version : bwcVersions.wireCompatible) {
    String baseName = "v${version}"
    Task oldClusterTest = tasks.create(name: "${baseName}#oldClusterTest", type: RestIntegTestTask) {
      mustRunAfter(precommit)
    }
    Object extension = extensions.findByName("${baseName}#oldClusterTestCluster")
    configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
      dependsOn copyTestNodeKeystore
      if (version.before('6.3.0')) {
        mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
      }
      String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/elasticsearch-users'
      setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
      bwcVersion = version
      numBwcNodes = 3
      numNodes = 3
      minimumMasterNodes = { 3 }
      clusterName = 'rolling-upgrade'
      waitCondition = waitWithAuth
      setting 'xpack.monitoring.exporters._http.type', 'http'
      setting 'xpack.monitoring.exporters._http.enabled', 'false'
      setting 'xpack.monitoring.exporters._http.auth.username', 'test_user'
      setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password'
      setting 'xpack.license.self_generated.type', 'trial'
      setting 'xpack.security.enabled', 'true'
      setting 'xpack.security.transport.ssl.enabled', 'true'
      setting 'xpack.security.authc.token.enabled', 'true'
      setting 'xpack.security.audit.enabled', 'true'
      setting 'xpack.security.audit.outputs', 'index'
      setting 'xpack.ssl.keystore.path', 'testnode.jks'
      setting 'xpack.ssl.keystore.password', 'testnode'
      dependsOn copyTestNodeKeystore
      extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks')
      if (withSystemKey) {
        if (version.onOrAfter('5.1.0') && version.before('6.0.0')) {
          // The setting didn't exist until 5.1.0
          setting 'xpack.security.system_key.required', 'true'
        }
        if (version.onOrAfter('6.0.0')) {
          keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key"
        } else {
          String systemKeyFile = version.before('6.3.0') ? 'x-pack/system_key' : 'system_key'
          extraConfigFile systemKeyFile, "${mainProject.projectDir}/src/test/resources/system_key"
        }
        setting 'xpack.watcher.encrypt_sensitive_data', 'true'
      }
    }
    Task oldClusterTestRunner = tasks.getByName("${baseName}#oldClusterTestRunner")
    oldClusterTestRunner.configure {
      systemProperty 'tests.rest.suite', 'old_cluster'
    }
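    // Shared cluster configuration for the upgrade stages: `name` is the cluster extension to
    // configure, `lastRunner` the runner that must finish first, `stopNode` the old node whose
    // data directory the replacement node takes over, and `unicastSeed` supplies the transport
    // URIs of the nodes that are still running.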
    Closure configureUpgradeCluster = { String name, Task lastRunner, int stopNode, Closure unicastSeed ->
      configure(extensions.findByName("${baseName}#${name}")) {
        dependsOn lastRunner, "${baseName}#oldClusterTestCluster#node${stopNode}.stop"
        setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
        clusterName = 'rolling-upgrade'
        unicastTransportUri = { seedNode, node, ant -> unicastSeed() }
        minimumMasterNodes = { 3 }
        /* Override the data directory so the new node always gets the data
         * directory of the node we just stopped. */
        dataDir = { nodeNumber -> oldClusterTest.nodes[stopNode].dataDir }
        waitCondition = waitWithAuth
        setting 'xpack.monitoring.exporters._http.type', 'http'
        setting 'xpack.monitoring.exporters._http.enabled', 'false'
        setting 'xpack.monitoring.exporters._http.auth.username', 'test_user'
        setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password'
        setting 'xpack.license.self_generated.type', 'trial'
        setting 'xpack.security.enabled', 'true'
        setting 'xpack.security.transport.ssl.enabled', 'true'
        setting 'xpack.ssl.keystore.path', 'testnode.jks'
        keystoreSetting 'xpack.ssl.keystore.secure_password', 'testnode'
        setting 'node.attr.upgraded', 'true'
        setting 'xpack.security.authc.token.enabled', 'true'
        setting 'xpack.security.audit.enabled', 'true'
        setting 'xpack.security.audit.outputs', 'index'
        setting 'node.name', "upgraded-node-${stopNode}"
        dependsOn copyTestNodeKeystore
        extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks')
        if (withSystemKey) {
          setting 'xpack.watcher.encrypt_sensitive_data', 'true'
          keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key"
        }
      }
    }
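    // Upgrade one node at a time: one third upgraded, two thirds upgraded, then fully upgraded.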
    Task oneThirdUpgradedTest = tasks.create(name: "${baseName}#oneThirdUpgradedTest", type: RestIntegTestTask)
    configureUpgradeCluster("oneThirdUpgradedTestCluster", oldClusterTestRunner, 0,
        // Use all running nodes as seed nodes so there is no race between pinging and the tests
        { oldClusterTest.nodes.get(1).transportUri() + ',' + oldClusterTest.nodes.get(2).transportUri() })
    Task oneThirdUpgradedTestRunner = tasks.getByName("${baseName}#oneThirdUpgradedTestRunner")
    oneThirdUpgradedTestRunner.configure {
      systemProperty 'tests.rest.suite', 'mixed_cluster'
      systemProperty 'tests.first_round', 'true'
      // We only need to run these tests once so we may as well do it when we're two thirds upgraded
      systemProperty 'tests.rest.blacklist', [
        'mixed_cluster/10_basic/Start scroll in mixed cluster on upgraded node that we will continue after upgrade',
        'mixed_cluster/30_ml_jobs_crud/Create a job in the mixed cluster and write some data',
        'mixed_cluster/40_ml_datafeed_crud/Put job and datafeed in mixed cluster',
      ].join(',')
      finalizedBy "${baseName}#oldClusterTestCluster#node1.stop"
    }
    Task twoThirdsUpgradedTest = tasks.create(name: "${baseName}#twoThirdsUpgradedTest", type: RestIntegTestTask)
    configureUpgradeCluster("twoThirdsUpgradedTestCluster", oneThirdUpgradedTestRunner, 1,
        // Use all running nodes as seed nodes so there is no race between pinging and the tests
        { oldClusterTest.nodes.get(2).transportUri() + ',' + oneThirdUpgradedTest.nodes.get(0).transportUri() })
    Task twoThirdsUpgradedTestRunner = tasks.getByName("${baseName}#twoThirdsUpgradedTestRunner")
    twoThirdsUpgradedTestRunner.configure {
      systemProperty 'tests.rest.suite', 'mixed_cluster'
      systemProperty 'tests.first_round', 'false'
      finalizedBy "${baseName}#oldClusterTestCluster#node2.stop"
    }
    Task upgradedClusterTest = tasks.create(name: "${baseName}#upgradedClusterTest", type: RestIntegTestTask)
    configureUpgradeCluster("upgradedClusterTestCluster", twoThirdsUpgradedTestRunner, 2,
        // Use all running nodes as seed nodes so there is no race between pinging and the tests
        { oneThirdUpgradedTest.nodes.get(0).transportUri() + ',' + twoThirdsUpgradedTest.nodes.get(0).transportUri() })
    Task upgradedClusterTestRunner = tasks.getByName("${baseName}#upgradedClusterTestRunner")
    upgradedClusterTestRunner.configure {
      systemProperty 'tests.rest.suite', 'upgraded_cluster'
      /*
       * Force stopping all the upgraded nodes after the test runner
       * so they are alive during the test.
       */
      finalizedBy "${baseName}#oneThirdUpgradedTestCluster#stop"
      finalizedBy "${baseName}#twoThirdsUpgradedTestCluster#stop"
      // Migration tests should only run when the original/old cluster nodes were on versions < 5.2.0.
      // This stinks but we do the check here since our rest tests do not support conditionals;
      // otherwise we could check the index created version.
      String versionStr = project.extensions.findByName("${baseName}#oldClusterTestCluster").properties.get('bwcVersion')
      String[] versionParts = versionStr.split('\\.')
      if (versionParts[0].equals("5")) {
        Integer minor = Integer.parseInt(versionParts[1])
        if (minor >= 2) {
          systemProperty 'tests.rest.blacklist', '/20_security/Verify default password migration results in upgraded cluster'
        }
      }
    }
    Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
      dependsOn = [upgradedClusterTest]
    }
    if (project.bwc_tests_enabled) {
      bwcTest.dependsOn(versionBwcTest)
    }
  }
  test.enabled = false // no unit tests for rolling upgrades, only the rest integration test
  // basic integ tests include testing bwc against the most recent version
  task integTest {
    if (project.bwc_tests_enabled) {
      for (final def version : bwcVersions.snapshotsWireCompatible) {
        dependsOn "v${version}#bwcTest"
      }
    }
  }
  check.dependsOn(integTest)
  dependencies {
    testCompile project(path: xpackModule('core'), configuration: 'shadow')
    testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
    testCompile project(path: xpackModule('watcher'))
  }
  compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"
  // copy x-pack plugin info so it is on the classpath and security manager has the right permissions
  task copyXPackRestSpec(type: Copy) {
    dependsOn(project.configurations.restSpec, 'processTestResources')
    from project(xpackProject('plugin').path).sourceSets.test.resources
    include 'rest-api-spec/api/**'
    into project.sourceSets.test.output.resourcesDir
  }
  task copyXPackPluginProps(type: Copy) {
    dependsOn(copyXPackRestSpec)
    from project(xpackModule('core')).file('src/main/plugin-metadata')
    from project(xpackModule('core')).tasks.pluginProperties
    into outputDir
  }
  project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps)
  repositories {
    maven {
      url "https://artifacts.elastic.co/maven"
    }
    maven {
      url "https://snapshots.elastic.co/maven"
    }
  }
}