// OpenSearch/plugin/build.gradle

import com.amazonaws.AmazonServiceException
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.services.s3.AmazonS3Client
import com.amazonaws.services.s3.model.HeadBucketRequest
import com.bettercloud.vault.Vault
import com.bettercloud.vault.VaultConfig
import com.bettercloud.vault.response.LogicalResponse
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.NodeInfo
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.attribute.PosixFilePermission
import java.nio.file.attribute.PosixFilePermissions
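
// Build configuration for the X-Pack plugin bundle, including resolution of the ml-cpp native
// artifacts from S3 using credentials obtained from the Elastic secrets Vault.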
group 'org.elasticsearch.plugin'
apply plugin: 'elasticsearch.esplugin'
esplugin {
  name 'x-pack'
  description 'Elasticsearch Expanded Pack Plugin'
  classname 'org.elasticsearch.xpack.XPackPlugin'
  hasNativeController true
  licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt')
  noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt')
}
archivesBaseName = 'x-pack' // for api jar
buildscript {
  repositories {
    mavenCentral()
  }
  dependencies {
    classpath group: 'com.bettercloud', name: 'vault-java-driver', version: "1.1.0"
    classpath 'com.amazonaws:aws-java-sdk-s3:1.10.33'
  }
}
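
// Authenticates against the Elastic secrets Vault and exports short-lived AWS credentials
// (project.ext.mlAwsAccessKey / mlAwsSecretKey) used to resolve the ml-cpp native bundle from S3.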
Closure setAwsCreds = {
  /**
   * The Elastic Secrets vault is served via HTTPS with a Let's Encrypt certificate. The root certificates that cross-signed the Let's
   * Encrypt certificates were not trusted by the JDK until 8u101. Therefore, we enforce that the JDK is at least 8u101 here.
   */
  final String javaVersion = System.getProperty('java.version')
  final String javaVendor = System.getProperty('java.vendor')
  def matcher = javaVersion =~ /1\.8\.0(?:_(\d+))?/
  boolean matches = matcher.matches()
  assert matches
  final int update
  if (matcher.group(1) == null) {
    update = 0
  } else {
    update = matcher.group(1).toInteger()
  }
  if (update < 101) {
    throw new GradleException("JDK ${javaVendor} ${javaVersion} does not have necessary root certificates " +
        "(https://bugs.openjdk.java.net/browse/JDK-8154757), update your JDK to at least JDK 8u101+")
  }

  // get an authentication token with vault
  String homePath = System.properties['user.home']
  File githubToken = file("${homePath}/.elastic/github.token")
  final String VAULT_URL = 'https://secrets.elastic.co:8200'
  final String VAULT_ROLE_ID = "8e90dd88-5a8e-9c12-0da9-5439f293ff97"
  final String VAULT_SECRET_ID = System.env.VAULT_SECRET_ID
  String authBody = null
  URL vaultUrl = null
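  // Prefer a developer's local GitHub token when present; otherwise fall back to an AppRole
  // secret id supplied via the VAULT_SECRET_ID environment variable (e.g. by CI).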
  if (githubToken.exists()) {
    try {
      Set<PosixFilePermission> perms = Files.getPosixFilePermissions(githubToken.toPath())
      if (perms.equals(PosixFilePermissions.fromString("rw-------")) == false) {
        throw new GradleException('github.token must have 600 permissions')
      }
    } catch (UnsupportedOperationException e) {
      // Assume this isn't a POSIX file system
    }
    vaultUrl = new URL(VAULT_URL + '/v1/auth/github/login')
    authBody = "{\"token\": \"${githubToken.getText('UTF-8').trim()}\"}"
  } else if (VAULT_SECRET_ID != null) {
    vaultUrl = new URL(VAULT_URL + '/v1/auth/approle/login')
    authBody = "{\"role_id\": \"${VAULT_ROLE_ID}\", \"secret_id\": \"${VAULT_SECRET_ID}\"}"
  } else {
    throw new GradleException('Missing ~/.elastic/github.token file or VAULT_SECRET_ID environment variable, needed to authenticate with vault for secrets')
  }
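  // Either credential source above is sufficient; the commands below are illustrative only:
  //   echo '<github personal access token>' > ~/.elastic/github.token && chmod 600 ~/.elastic/github.token
  //   export VAULT_SECRET_ID='<approle secret id>'
  // The token (or role/secret id pair) is exchanged for a Vault client token by PUTting the auth
  // body to the corresponding login endpoint.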
  HttpURLConnection vaultConn = (HttpURLConnection) vaultUrl.openConnection()
  vaultConn.setRequestProperty('Content-Type', 'application/json')
  vaultConn.setRequestMethod('PUT')
  vaultConn.setDoOutput(true)
  vaultConn.outputStream.withWriter('UTF-8') { writer ->
    writer.write(authBody)
  }
  vaultConn.connect()
  Object authResponse = new groovy.json.JsonSlurper().parseText(vaultConn.content.text)

  VaultConfig config = new VaultConfig(VAULT_URL, authResponse.auth.client_token)
  Vault vault = new Vault(config)
  LogicalResponse secret = vault.logical().read("aws-dev/creds/prelertartifacts")
  project.ext.mlAwsAccessKey = secret.data.get('access_key')
  project.ext.mlAwsSecretKey = secret.data.get('secret_key')
  // Retry for up to 1 minute to give AWS a chance to propagate the credentials
  int retries = 120
  while (retries > 0) {
    AWSCredentials creds = new BasicAWSCredentials(project.mlAwsAccessKey, project.mlAwsSecretKey)
    ClientConfiguration clientConfiguration = new ClientConfiguration()
    // the response metadata cache is only there for diagnostics purposes,
    // but can force objects from every response to the old generation.
    clientConfiguration.setResponseMetadataCacheSize(0)
    AmazonS3Client client = new AmazonS3Client(creds, clientConfiguration)
    try {
      client.headBucket(new HeadBucketRequest('prelert-artifacts'))
      break
    } catch (AmazonServiceException e) {
      // a 403 is expected while the fresh credentials propagate; anything else, or exhausting
      // the retries, is fatal
      if (e.getStatusCode() != 403 || retries == 1) {
        throw new GradleException('Could not access ml-cpp artifacts. Timed out after 1 minute', e)
      }
    }
    sleep(500)
    retries--
  }
}
gradle.taskGraph.whenReady { taskGraph ->
  // Vault auth to get keys for access to cpp artifacts
  if (taskGraph.hasTask(bundlePlugin)) {
    if (project.hasProperty("mlAwsAccessKey") == false && project.hasProperty("mlAwsSecretKey") == false) {
      if (project.gradle.startParameter.isOffline()) {
        // if the project is offline, then we shouldn't try to contact AWS and instead can just
        // use a cached, but possibly expired artifact so that the build works...
        project.ext.mlAwsAccessKey = ""
        project.ext.mlAwsSecretKey = ""
      } else {
        setAwsCreds()
      }
    }
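    // Resolve the ml-cpp zip from the S3-backed Maven repository using the credentials fetched
    // above (blank credentials in offline mode fall back to Gradle's dependency cache).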
    repositories {
      maven {
        url "s3://prelert-artifacts/maven"
        credentials(AwsCredentials) {
          accessKey "${project.mlAwsAccessKey}"
          secretKey "${project.mlAwsSecretKey}"
        }
      }
    }
    // For some unknown reason, this from statement triggers a resolve of the nativeBundle
    // configuration. It should be delayed until the bundlePlugin task executes, but it is not.
    // So for now we add the extra files to bundlePlugin here, right after configuring the
    // prelert maven repository, which only happens when bundlePlugin is going to run anyway.
    project.bundlePlugin.from {
      zipTree(configurations.nativeBundle.singleFile)
    }
    project.extractNativeLicenses.from {
      zipTree(configurations.nativeBundle.singleFile)
    }
  }
}
// TODO: fix this! https://github.com/elastic/x-plugins/issues/1066
ext.compactProfile = 'full'
dependencyLicenses {
  mapping from: /netty-.*/, to: 'netty'
  mapping from: /bc.*/, to: 'bouncycastle'
  mapping from: /owasp-java-html-sanitizer.*/, to: 'owasp-java-html-sanitizer'
  mapping from: /transport-netty.*/, to: 'elasticsearch'
  mapping from: /rest.*/, to: 'elasticsearch'
  mapping from: /http.*/, to: 'httpclient' // pulled in by rest client
  mapping from: /commons-.*/, to: 'commons' // pulled in by rest client
  mapping from: /sniffer.*/, to: 'elasticsearch'
  ignoreSha 'rest'
  ignoreSha 'transport-netty4'
  ignoreSha 'sniffer'
}
licenseHeaders {
  approvedLicenses << 'BCrypt (BSD-like)'
  additionalLicense 'BCRYP', 'BCrypt (BSD-like)', 'Copyright (c) 2006 Damien Miller <djm@mindrot.org>'
}
configurations {
  nativeBundle {
    // Check for new native code on every build, otherwise builds can
    // fail for 24 hours after a change to the C++ output format
    resolutionStrategy.cacheChangingModulesFor 0, 'seconds'
  }
}
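
// When the C++ project is checked out alongside this one, build and use its uber zip instead of
// the published ml-cpp artifact.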
if (findProject(':machine-learning-cpp') != null) {
  configurations.nativeBundle {
    resolutionStrategy.dependencySubstitution {
      substitute module("org.elasticsearch.ml:ml-cpp") with project(":machine-learning-cpp")
    }
  }
  bundlePlugin.dependsOn ':machine-learning-cpp:buildUberZip'
}
dependencies {
  // security deps
  compile project(path: ':modules:transport-netty4', configuration: 'runtime')
  compile 'com.unboundid:unboundid-ldapsdk:3.2.0'
  compile 'org.bouncycastle:bcprov-jdk15on:1.55'
  compile 'org.bouncycastle:bcpkix-jdk15on:1.55'
  testCompile 'com.google.jimfs:jimfs:1.1'
  // watcher deps
  compile 'com.googlecode.owasp-java-html-sanitizer:owasp-java-html-sanitizer:r239'
  compile 'com.google.guava:guava:16.0.1' // needed by watcher for the html sanitizer and security tests for jimfs
  compile 'com.sun.mail:javax.mail:1.5.6'
  // HACK: java 9 removed javax.activation from the default modules, so instead of trying to add modules, which would have
  // to be conditionalized for java 8/9, we pull in the classes directly
  compile 'javax.activation:activation:1.1.1'
  testCompile 'org.subethamail:subethasmtp:3.1.7'
  // needed for subethasmtp, has @GuardedBy annotation
  testCompile 'com.google.code.findbugs:jsr305:3.0.1'
  // monitoring deps
  compile "org.elasticsearch.client:rest:${version}"
  compile "org.elasticsearch.client:sniffer:${version}"
  // ml deps
  compile 'net.sf.supercsv:super-csv:2.4.0'
  nativeBundle ("org.elasticsearch.ml:ml-cpp:${project.version}@zip") {
    changing = true
  }
  testCompile 'org.ini4j:ini4j:0.5.2'
  // common test deps
  testCompile 'org.elasticsearch:securemock:1.2'
  testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}"
  testCompile 'org.slf4j:slf4j-log4j12:1.6.2'
  testCompile 'org.slf4j:slf4j-api:1.6.2'
}
// make LicenseSigner available for testing signed licenses
sourceSets.test.java {
  srcDir '../license-tools/src/main/java'
}
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
ext.expansions = [
  'project.version': version,
]
processResources {
  from(sourceSets.main.resources.srcDirs) {
    exclude '**/public.key'
    inputs.properties(expansions)
    MavenFilteringHack.filter(it, expansions)
  }
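  // Snapshot builds bundle the dev public key; release builds (-Dbuild.snapshot=false) bundle the
  // production key.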
  boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true"))
  if (snapshot) {
    from 'keys/dev/public.key'
  } else {
    from 'keys/prod/public.key'
  }
}
forbiddenPatterns {
  exclude '**/*.key'
  exclude '**/*.p12'
  exclude '**/*.der'
  exclude '**/*.zip'
}
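
// Copies the native bundle's license files into the build directory; the copy source is wired up
// lazily in the taskGraph.whenReady block above so that nativeBundle is only resolved when needed.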
task extractNativeLicenses(type: Copy) {
  into "${buildDir}"
  include 'platform/licenses/**'
}
// TODO: standardize packaging config for plugins
bundlePlugin {
  from('bin/x-pack') {
    into 'bin'
  }
  from('config/x-pack') {
    into 'config'
  }
  // We don't ship the individual nativeBundle licenses - instead
  // they get combined into the top level NOTICES file we ship
  exclude 'platform/licenses/**'
}
// add api jar for extension authors to compile against
// note this is just the normal x-pack jar for now, with a different name
project.afterEvaluate {
  task apiJar {
    dependsOn('generatePomFileForApijarPublication', project.jar)
    doFirst {
      Path jarFile = project.jar.outputs.files.singleFile.toPath()
      String apiFileName = jarFile.fileName.toString().replace(project.version, "api-${project.version}")
      Files.copy(jarFile, jarFile.resolveSibling(apiFileName), StandardCopyOption.REPLACE_EXISTING)
      String pomFileName = jarFile.fileName.toString().replace('.jar', '.pom')
      String apiPomFileName = apiFileName.replace('.jar', '.pom')
      Files.copy(jarFile.resolveSibling(pomFileName), jarFile.resolveSibling(apiPomFileName),
          StandardCopyOption.REPLACE_EXISTING)
    }
  }
  assemble.dependsOn(apiJar)

  project.publishing {
    publications {
      apijar(MavenPublication) {
        from project.components.java
        artifactId = 'x-pack-api'
        pom.withXml { XmlProvider xml ->
          Node root = xml.asNode()
          root.appendNode('name', project.pluginProperties.extension.name)
          root.appendNode('description', project.pluginProperties.extension.description)
        }
      }
    }
  }

  // Add an extra licenses directory to the combined notices
  project.tasks.findByName('generateNotice').dependsOn extractNativeLicenses
  project.tasks.findByName('generateNotice').licensesDir new File("${project.buildDir}/platform/licenses")
}
integTestRunner {
  // TODO: fix this rest test to not depend on a hardcoded port!
  systemProperty 'tests.rest.blacklist', 'getting_started/10_monitor_cluster_health/*'
}
integTestCluster {
  setting 'xpack.ml.enabled', 'true'
  setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE'
  // Integration tests are supposed to enable/disable exporters before/after each test
  setting 'xpack.monitoring.exporters._local.type', 'local'
  setting 'xpack.monitoring.exporters._local.enabled', 'false'
  setting 'xpack.monitoring.collection.interval', '-1'
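  // Poll cluster health with the default elastic/changeme credentials until the node is up, and
  // record the successful response in wait.success so the cluster start check can pass.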
  waitCondition = { NodeInfo node, AntBuilder ant ->
    File tmpFile = new File(node.cwd, 'wait.success')
    for (int i = 0; i < 10; i++) {
      // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
      HttpURLConnection httpURLConnection = null;
      try {
        httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}&wait_for_status=yellow").openConnection();
        httpURLConnection.setRequestProperty("Authorization", "Basic " +
            Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8)));
        httpURLConnection.setRequestMethod("GET");
        httpURLConnection.connect();
        if (httpURLConnection.getResponseCode() == 200) {
          tmpFile.withWriter StandardCharsets.UTF_8.name(), {
            it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name()))
          }
          // the cluster is healthy, no need to keep polling
          break;
        }
      } catch (Exception e) {
        if (i == 9) {
          logger.error("final attempt of calling cluster health failed", e)
        } else {
          logger.debug("failed to call cluster health", e)
        }
      } finally {
        if (httpURLConnection != null) {
          httpURLConnection.disconnect();
        }
      }
      // did not start, so wait a bit before trying again
      Thread.sleep(500L);
    }
    return tmpFile.exists()
  }
}
test {
  /*
   * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
   * other if we allow them to set the number of available processors as it's set-once in Netty.
   */
  systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
integTestRunner {
  /*
   * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
   * other if we allow them to set the number of available processors as it's set-once in Netty.
   */
  systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
// TODO: don't publish test artifacts just to run messy tests, fix the tests!
// https://github.com/elastic/x-plugins/issues/724
configurations {
  testArtifacts.extendsFrom testRuntime
}
task testJar(type: Jar) {
  classifier "test"
  from sourceSets.test.output
}
artifacts {
  // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions
  archives jar
  testArtifacts testJar
}
// classes are missing, e.g. com.ibm.icu.lang.UCharacter
thirdPartyAudit.excludes = [
  // uses internal java api: sun.misc.Unsafe
  'com.google.common.cache.Striped64',
  'com.google.common.cache.Striped64$1',
  'com.google.common.cache.Striped64$Cell',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
  // pulled in as external dependency to work on java 9
  'com.sun.activation.registries.LineTokenizer',
  'com.sun.activation.registries.LogSupport',
  'com.sun.activation.registries.MailcapFile',
  'com.sun.activation.registries.MailcapParseException',
  'com.sun.activation.registries.MailcapTokenizer',
  'com.sun.activation.registries.MimeTypeEntry',
  'com.sun.activation.registries.MimeTypeFile',
  'javax.activation.ActivationDataFlavor',
  'javax.activation.CommandInfo',
  'javax.activation.CommandMap',
  'javax.activation.CommandObject',
  'javax.activation.DataContentHandler',
  'javax.activation.DataContentHandlerFactory',
  'javax.activation.DataHandler$1',
  'javax.activation.DataHandler',
  'javax.activation.DataHandlerDataSource',
  'javax.activation.DataSource',
  'javax.activation.DataSourceDataContentHandler',
  'javax.activation.FileDataSource',
  'javax.activation.FileTypeMap',
  'javax.activation.MailcapCommandMap',
  'javax.activation.MimeType',
  'javax.activation.MimeTypeParameterList',
  'javax.activation.MimeTypeParseException',
  'javax.activation.MimetypesFileTypeMap',
  'javax.activation.ObjectDataContentHandler',
  'javax.activation.SecuritySupport$1',
  'javax.activation.SecuritySupport$2',
  'javax.activation.SecuritySupport$3',
  'javax.activation.SecuritySupport$4',
  'javax.activation.SecuritySupport$5',
  'javax.activation.SecuritySupport',
  'javax.activation.URLDataSource',
  'javax.activation.UnsupportedDataTypeException'
]
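
// Settings applied to the node started by the `run` task, so that all X-Pack features are enabled
// when running the plugin locally.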
run {
  setting 'xpack.ml.enabled', 'true'
  setting 'xpack.graph.enabled', 'true'
  setting 'xpack.security.enabled', 'true'
  setting 'xpack.monitoring.enabled', 'true'
  setting 'xpack.watcher.enabled', 'true'
}