Gets build to use elasticsearch-extras (elastic/elasticsearch#758)
* Gets build to use elasticsearch-extras. Also adds a CI script for building the repo on CI servers.

  To use this change you need to:
  1. Clone elasticsearch: `git@github.com:elastic/elasticsearch.git`
  2. Create a directory at the same level as elasticsearch called `elasticsearch-extra`
  3. Clone this repository into the `elasticsearch-extra` directory
  4. Run `gradle build` from the `elasticsearch-extra/prelert-legacy` directory, or run `gradle :prelert-legacy:build` from the `elasticsearch` directory

* Adds USE_SSH option to ci script

* iter

Original commit: elastic/x-pack-elasticsearch@ea127dfef0
parent bddfac59ed
commit 4c6989212a

build.gradle (195 lines changed)
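The setup described in the commit message, as a minimal shell sketch (the prelert-legacy clone URL is elided, as in the message, and `gradle` plus a JDK are assumed to be installed):

# Clone elasticsearch and create the elasticsearch-extra sibling directory.
git clone git@github.com:elastic/elasticsearch.git
mkdir elasticsearch-extra

# Clone this repository into elasticsearch-extra (URL elided, as in the commit message).
git clone <this-repository-url> elasticsearch-extra/prelert-legacy

# Build from the prelert-legacy checkout ...
cd elasticsearch-extra/prelert-legacy
gradle build

# ... or drive the same build from the elasticsearch checkout.
cd ../../elasticsearch
gradle :prelert-legacy:build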
@@ -3,7 +3,10 @@ description = 'Builds the Machine Learning Java classes and UI'
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.elasticsearch.gradle.precommit.LicenseHeadersTask
import org.elasticsearch.gradle.VersionProperties
import org.apache.tools.ant.taskdefs.condition.Os

if (project.projectDir.name != 'prelert-legacy') {
    throw new GradleException('You must checkout prelert-legacy in the following directory: <path to Elasticsearch checkout>/../elasticsearch-extra/prelert-legacy')
}

String mlAwsAccessKey = System.env.PRELERT_AWS_ACCESS_KEY_ID
if (mlAwsAccessKey == null && project.hasProperty("PRELERT_AWS_ACCESS_KEY_ID")) {
@@ -26,40 +29,6 @@ if (envCppLocalDists != null) {

allprojects {
    group = 'org.elasticsearch.ml'
    version = VersionProperties.elasticsearch
}

configurations.all {
    // check for updates every build
    resolutionStrategy.cacheChangingModulesFor 0, 'seconds'
}

buildscript {
    repositories {
        if (System.getProperty("repos.mavenlocal") != null) {
            // with -Drepos.mavenlocal=true we can force checking the local .m2 repo which is useful for building against
            // elasticsearch snapshots
            mavenLocal()
        }
        mavenCentral()
        maven {
            name 'sonatype-snapshots'
            url "https://oss.sonatype.org/content/repositories/snapshots/"
        }
        jcenter()
    }
    dependencies {
        classpath group: 'org.elasticsearch.gradle', name: 'build-tools', version: "${elasticsearchVersion}", changing: true
    }
}

subprojects {
    // we must not publish to sonatype until we have set up x-plugins to only publish the parts we want to publish!
    project.afterEvaluate {
        if (project.plugins.hasPlugin('com.bmuschko.nexus') && project.nexus.repositoryUrl.startsWith('file://') == false) {
            uploadArchives.enabled = false
        }
    }
}

task bundlePack(type: Zip) {
@@ -70,7 +39,31 @@ task bundlePack(type: Zip) {
    from { zipTree(project('kibana').bundlePlugin.outputs.files.singleFile) }
    destinationDir file('build/distributions')
    baseName = 'ml'
    version = project.version
    version = VersionProperties.elasticsearch
}

subprojects {
    plugins.withType(MavenPublishPlugin).whenPluginAdded {
        publishing {
            publications {
                // add license information to generated poms
                all {
                    pom.withXml { XmlProvider xml ->
                        Node node = xml.asNode()

                        Node license = node.appendNode('licenses').appendNode('license')
                        license.appendNode('name', 'Elastic Commercial Software End User License Agreement')
                        license.appendNode('url', 'https://www.elastic.co/eula/')
                        license.appendNode('distribution', 'repo')

                        Node developer = node.appendNode('developers').appendNode('developer')
                        developer.appendNode('name', 'Elastic')
                        developer.appendNode('url', 'http://www.elastic.co')
                    }
                }
            }
        }
    }
}

task assemble(dependsOn: bundlePack) {
@@ -83,11 +76,6 @@ task test(dependsOn: [':elasticsearch:test', ':kibana:test']) {
    description = 'Assembles and tests this project.'
}

task build(dependsOn: [assemble, test]) {
    group = 'Build'
    description = 'Assembles and tests this project.'
}

task clean(type: Delete) {
    group = 'Build'
    description = 'Deletes the build directory'
@@ -95,26 +83,16 @@ task clean(type: Delete) {
}

subprojects {
    apply plugin: 'eclipse'
    apply plugin: 'idea'

    buildscript {
        repositories {
            if (System.getProperty("repos.mavenlocal") != null) {
                // with -Drepos.mavenlocal=true we can force checking the local .m2 repo which is useful for building against
                // elasticsearch snapshots
                mavenLocal()
            }
            mavenCentral()
            maven {
                name 'sonatype-snapshots'
                url "https://oss.sonatype.org/content/repositories/snapshots/"
            }
            jcenter()
        }
    }
    tasks.withType(LicenseHeadersTask.class) {
        approvedLicenses = ['Elasticsearch Confidential']
        additionalLicense 'ESCON', 'Elasticsearch Confidential', 'ELASTICSEARCH CONFIDENTIAL'
    }
    ext.projectSubstitutions += [ "org.elasticsearch.plugin:ml-api:${version}": ':prelert-legacy:elasticsearch' ]
}

repositories {
allprojects {
    repositories {
        if (System.getProperty("repos.mavenlocal") != null) {
            // with -Drepos.mavenlocal=true we can force checking the local .m2 repo which is useful for building against
            // elasticsearch snapshots
@@ -134,103 +112,4 @@ subprojects {
        }
        jcenter()
    }

    tasks.withType(LicenseHeadersTask.class) {
        approvedLicenses = ['Elasticsearch Confidential']
        additionalLicense 'ESCON', 'Elasticsearch Confidential', 'ELASTICSEARCH CONFIDENTIAL'
    }
}

allprojects {
    // injecting groovy property variables into all projects
    project.ext {
        // for ide hacks...
        isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
        isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea')
    }
}

allprojects {
    apply plugin: 'idea'

    if (isIdea) {
        project.buildDir = file('build-idea')
    }
    idea {
        module {
            inheritOutputDirs = false
            outputDir = file('build-idea/classes/main')
            testOutputDir = file('build-idea/classes/test')

            // also ignore other possible build dirs
            excludeDirs += file('build')
            excludeDirs += file('build-eclipse')

            iml {
                // fix so that Gradle idea plugin properly generates support for resource folders
                // see also https://issues.gradle.org/browse/GRADLE-2975
                withXml {
                    it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/main/resources' }.each {
                        it.attributes().remove('isTestSource')
                        it.attributes().put('type', 'java-resource')
                    }
                    it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/test/resources' }.each {
                        it.attributes().remove('isTestSource')
                        it.attributes().put('type', 'java-test-resource')
                    }
                }
            }
        }
    }
}

// Make sure gradle idea was run before running anything in intellij (including import).
File ideaMarker = new File(projectDir, '.local-idea-is-configured')
tasks.idea.doLast {
    ideaMarker.setText('', 'UTF-8')
}
if (System.getProperty('idea.active') != null && ideaMarker.exists() == false) {
    throw new GradleException('You must run gradle idea from the root of elasticsearch before importing into IntelliJ')
}

// eclipse configuration
allprojects {
    apply plugin: 'eclipse'
    // Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
    if (path != ':') {
        eclipse.project.name = path
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            eclipse.project.name = eclipse.project.name.replace(':', '_')
        }
    }

    plugins.withType(JavaBasePlugin) {
        File eclipseBuild = project.file('build-eclipse')
        eclipse.classpath.defaultOutputDir = eclipseBuild
        if (isEclipse) {
            // set this so generated dirs will be relative to eclipse build
            project.buildDir = eclipseBuild
        }
        eclipse.classpath.file.whenMerged { classpath ->
            // give each source folder a unique corresponding output folder
            int i = 0;
            classpath.entries.findAll { it instanceof SourceFolder }.each { folder ->
                i++;
                // this is *NOT* a path or a file.
                folder.output = "build-eclipse/" + i
            }
        }
    }
    task copyEclipseSettings(type: Copy) {
        // TODO: "package this up" for external builds
        from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings')
        into '.settings'
    }
    // otherwise .settings is not nuked entirely
    task wipeEclipseSettings(type: Delete) {
        delete '.settings'
    }
    tasks.cleanEclipse.dependsOn(wipeEclipseSettings)
    // otherwise the eclipse merging is *super confusing*
    tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings)
}
@@ -0,0 +1,10 @@
File extrasDir = new File(settingsDir, '../..').getCanonicalFile()
if (extrasDir.name.endsWith('-extra') == false) {
    throw new GradleException("prelert-legacy must be checked out under an elasticsearch-extra directory, found ${extrasDir.name}")
}
File elasticsearchDir = new File(extrasDir.parentFile, extrasDir.name[0..-7])
if (elasticsearchDir.exists() == false) {
    throw new GradleException("${elasticsearchDir.name} is missing as a sibling to ${extrasDir.name}")
}

project(':').projectDir = new File(elasticsearchDir, 'buildSrc')
@@ -0,0 +1,151 @@
#!/bin/bash
# This script is used as a single command to run the ml tests.
#
# It will attempt to check out 'elasticsearch' into a sibling directory
# unless the environment variable `USE_EXISTING_ES` has a value. The
# branch of elasticsearch which will be checked out depends on
# environment variables. If running locally, set GIT_BRANCH. When
# running in Jenkins, that env var is set. When running a PR
# jenkins job, the variables PR_SOURCE_BRANCH and PR_TARGET_BRANCH
# will be set and the source branch will be looked for in elasticsearch
# before falling back to the target branch name.
#
# It will also attempt to install the appropriate version of node.js
# for the Kibana plugin tests using nvm, unless
# `xpack.kibana.build=false` is defined in
# ~/.gradle/gradle.properties. Set a custom nvm directory using the
# `NVM_DIR` environment variable.
#

# Turn on semi-strict mode
set -e
set -o pipefail

# Allow the user choose different test through a single cli arg
# default to `check` if no argument has been supplied
key=${1-check}
case $key in
    packagingTest)
        GRADLE_CLI_ARGS=(
            "--info"
            "-Pvagrant.boxes=all"
            ":prelert-legacy:qa:vagrant:packagingTest"
        )
        ;;
    check)
        GRADLE_CLI_ARGS=(
            "--info"
            "check"
            "-Dtests.network=true"
            "-Dtests.badapples=true"
        )
        ;;
    jdk9)
        GRADLE_CLI_ARGS=(
            "-Pxpack.kibana.build=false"
            "--info"
            "check"
            "-Dtests.network=true"
            "-Dtests.badapples=true"
            -Dtests.jvm.argline="--add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.nio.file=ALL-UNNAMED --add-opens=java.base/java.security.cert=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/javax.net.ssl=ALL-UNNAMED"
        )
        ;;
    *)
        echo "Unsupported cli argument $1. Allowed arguments are packagingTest or check. No argument defaults to check."
        exit 1;;
esac

SCRIPT="$0"

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
    ls=$(ls -ld "$SCRIPT")
    # Drop everything prior to ->
    link=$(expr "$ls" : '.*-> \(.*\)$')
    if expr "$link" : '/.*' > /dev/null; then
        SCRIPT="$link"
    else
        SCRIPT=$(dirname "$SCRIPT")/"$link"
    fi
done

# determine base directory
BASE_DIR=$(dirname "$SCRIPT")/..

# make BASE_DIR absolute
BASE_DIR=$(cd "$BASE_DIR"; pwd)

PARENT_DIR=$(cd "$BASE_DIR"/../..; pwd)

# go to the parent directory
cd $PARENT_DIR

if [ -z ${USE_EXISTING_ES:+x} ]; then
    if [ -d "./elasticsearch" ]; then
        echo "I expected a clean workspace but an 'elasticsearch' sibling directory already exists in [$PARENT_DIR]!"
        echo
        echo "Either define 'USE_EXISTING_ES' or remove the existing 'elasticsearch' sibling."
        exit 1
    fi
    BRANCH=${PR_SOURCE_BRANCH:-${GIT_BRANCH#*/}} # GIT_BRANCH starts with the repo, i.e., origin/master
    BRANCH=${BRANCH:-master} # fall back to CI branch if not testing a PR
    echo "Checking if branch '$BRANCH' has elasticsearch sibling..."
    ES_REPO_URL="https://github.com/elastic/elasticsearch.git"
    if [ -z ${USE_SSH:+x} ]; then
        ES_REPO_URL="git@github.com:elastic/elasticsearch.git"
    fi
    if [[ -z "$(git ls-remote --heads $ES_REPO_URL $BRANCH)" ]]; then
        echo "No sibling branch, using PR target branch"
        BRANCH=$PR_TARGET_BRANCH
    fi
    echo "Checking out Elasticsearch '$BRANCH' branch..."
    git clone -b $BRANCH $ES_REPO_URL --depth=1
else
    if [ -d "./elasticsearch" ]; then
        echo "Using existing 'elasticsearch' checkout"
    else
        echo "You have defined 'USE_EXISTING_ES' but no existing Elasticsearch directory exists!"
        exit 2
    fi
fi

ES_COMMIT="$(cd "$PARENT_DIR/elasticsearch"; git show --oneline -s | cut -d\  -f1)"
ML_COMMIT="$(cd "$BASE_DIR"; git show --oneline -s | cut -d\  -f1)"

# back to base directory
cd "$BASE_DIR"

if ! grep -q -e '^xpack\.kibana\.build=false$' ~/.gradle/gradle.properties
then
    # install the correct node.js version
    if [ -z ${NVM_DIR:+x} ]; then
        export NVM_DIR="/var/lib/jenkins/.nvm";
    fi

    NVM_SCRIPT="$NVM_DIR/nvm.sh"
    if [ -s "$NVM_SCRIPT" ]; then
        . "$NVM_SCRIPT" # load nvm
    else
        echo "Unable to find the nvm script at \"$NVM_SCRIPT\""
        exit 1
    fi

    echo "Installing node.js version $(cat ./kibana/.node-version)..."
    nvm install "$(cat ./kibana/.node-version)"
fi

echo "Running ML tests..."
echo "Running in $PWD"
echo "Elasticsearch commit: $ES_COMMIT"
echo "Machine Learning commit: $ML_COMMIT"

# output the commands
set -xuf

# clean
gradle --stacktrace clean

# Actually run the tests
gradle "${GRADLE_CLI_ARGS[@]}"

# ~*~ shell-script-mode ~*~
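For reference, a hedged invocation sketch for the CI script above (the `./ci` path and name are assumptions; the environment variables are the ones documented in the script's header comments):

# Default 'check' run; GIT_BRANCH is normally provided by Jenkins.
GIT_BRANCH=origin/master ./ci

# Reuse an elasticsearch checkout that already sits beside elasticsearch-extra.
USE_EXISTING_ES=true ./ci check

# Run the Vagrant packaging tests instead of the default check target.
./ci packagingTest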
@@ -8,10 +8,6 @@ esplugin {
    classname 'org.elasticsearch.xpack.ml.MlPlugin'
}

version = project.version

thirdPartyAudit.enabled = false

configurations {
    nativeBundle
}
@@ -2,13 +2,13 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

dependencies {
    testCompile project(path: ':elasticsearch', configuration: 'runtime')
    testCompile project(path: ':prelert-legacy:elasticsearch', configuration: 'runtime')
}

integTest {
    cluster {
        numNodes = 3
        distribution = 'zip'
        plugin ':elasticsearch'
        plugin ':prelert-legacy:elasticsearch'
    }
}
@@ -1,5 +1,7 @@
rootProject.name = 'ml'
include ':elasticsearch'
include ':docs'
include ':kibana'
include ':qa:basic-multi-node'
File extrasDir = new File(settingsDir, '..').getCanonicalFile()
if (extrasDir.name.endsWith('-extra') == false) {
    throw new GradleException("prelert-legacy must be checked out under an elasticsearch-extra directory, found ${extrasDir.name}")
}
File elasticsearchDir = new File(extrasDir.parentFile, extrasDir.name[0..-7])
project(':').projectDir = elasticsearchDir
apply from: "${elasticsearchDir}/settings.gradle"