#!/bin/bash

# This script is used as a single command to run the x-pack tests.
#
# It will attempt to check out 'elasticsearch' into a sibling directory
# unless the environment variable `USE_EXISTING_ES` has a value. The
# branch of elasticsearch which will be checked out depends on
# environment variables. If running locally, set GIT_BRANCH. When
# running in Jenkins, that env var is set. When running a PR
# jenkins job, the variables PR_SOURCE_BRANCH and PR_TARGET_BRANCH
# will be set and the source branch will be looked for in elasticsearch
# before falling back to the target branch name.
#
# It will also attempt to install the appropriate version of node.js
# for the Kibana plugin tests using nvm, unless
# `xpack.kibana.build=false` is defined in
# ~/.gradle/gradle.properties. Set a custom nvm directory using the
# `NVM_DIR` environment variable.
#
# Semi-strict mode: abort on any command failure, and make a pipeline
# report failure when any stage in it fails (not just the last one).
set -e -o pipefail
# Allow the user to choose a different test suite through a single CLI
# argument; default to `check` if no argument has been supplied.
key=${1-check}

case $key in
  intake)
    GRADLE_CLI_ARGS=(
      "--info"
      "compileJava"
      "compileTestJava"
      "precommit"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
    )
    ;;
  packagingTest)
    GRADLE_CLI_ARGS=(
      "--info"
      "-Pvagrant.boxes=all"
      "packagingTest"
    )
    ;;
  bwcTest)
    GRADLE_CLI_ARGS=(
      "--info"
      "bwcTest"
    )
    ;;
  check)
    GRADLE_CLI_ARGS=(
      "--info"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
    )
    ;;
  smokeTestSql) # TODO remove this once we are ready to merge sql down
    GRADLE_CLI_ARGS=(
      "--info"
      "-psql"
      "check"
      ":x-pack-elasticsearch:plugin:precommit"
      ":x-pack-elasticsearch:plugin:sql:check"
      ":x-pack-elasticsearch:plugin:sql:sql-cli:check"
      ":x-pack-elasticsearch:plugin:sql:jdbc:check"
      ":x-pack-elasticsearch:qa:sql:check"
      ":x-pack-elasticsearch:qa:sql:multinode:check"
      ":x-pack-elasticsearch:qa:sql:no-security:check"
      ":x-pack-elasticsearch:qa:sql:security:check"
      ":x-pack-elasticsearch:qa:sql:security:no-ssl:check"
      ":x-pack-elasticsearch:qa:sql:security:ssl:check"
    )
    ;;
  releaseTest)
    GRADLE_CLI_ARGS=(
      "--info"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
      "-Dbuild.snapshot=false"
      "-Dtests.jvm.argline=-Dbuild.snapshot=false"
    )
    ;;
  *)
    # Fail fast on an unknown argument; keep this list in sync with the
    # cases above. Diagnostics go to stderr.
    echo "Unsupported cli argument $1. Allowed arguments are intake, packagingTest, bwcTest, check, smokeTestSql or releaseTest. No argument defaults to check." >&2
    exit 1;;
esac
SCRIPT="$0"

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have
# the concrete path. Use readlink instead of parsing `ls -ld` output with
# expr: parsing ls is fragile and breaks on names containing " -> ".
while [ -h "$SCRIPT" ] ; do
  link=$(readlink "$SCRIPT")
  if [ "${link#/}" != "$link" ]; then
    # Absolute target: follow it directly.
    SCRIPT="$link"
  else
    # Relative target: resolve it against the directory of the current link.
    SCRIPT=$(dirname "$SCRIPT")/"$link"
  fi
done
# Determine the base directory (the parent of the directory this script
# lives in).
BASE_DIR=$(dirname "$SCRIPT")/..

# Make BASE_DIR absolute.
BASE_DIR=$(cd "$BASE_DIR"; pwd)

# The workspace directory two levels above BASE_DIR; this is where the
# sibling 'elasticsearch' checkout is expected to live.
PARENT_DIR=$(cd "$BASE_DIR"/../..; pwd)

# Go to the parent directory. Quote the path so it survives whitespace.
cd "$PARENT_DIR"
# Clone a fresh 'elasticsearch' sibling checkout unless the caller asked
# to reuse an existing one via USE_EXISTING_ES.
if [ -z "${USE_EXISTING_ES:+x}" ]; then
  if [ -d "./elasticsearch" ]; then
    echo "I expected a clean workspace but an 'elasticsearch' sibling directory already exists in [$PARENT_DIR]!"
    echo
    echo "Either define 'USE_EXISTING_ES' or remove the existing 'elasticsearch' sibling."
    exit 1
  fi

  # Decide which GitHub user/branch of elasticsearch to clone, preferring
  # the PR author's branch when running a pull-request job. Sets GH_USER
  # and BRANCH.
  function pick_clone_target {
    echo "picking which branch of elasticsearch to clone"

    # PR_* are provided by the CI git plugin for pull requests
    if [[ -n "$PR_AUTHOR" && -n "$PR_SOURCE_BRANCH" ]]; then
      GH_USER="$PR_AUTHOR"
      BRANCH="$PR_SOURCE_BRANCH"
      echo " -> using pull request author $GH_USER and branch $BRANCH"
      # Only use the author's fork if the branch actually exists there.
      if [[ -n "$(git ls-remote --heads "https://github.com/$GH_USER/elasticsearch.git" "$BRANCH" 2>/dev/null)" ]]; then
        return
      fi
    fi
    GH_USER="elastic"
    # GIT_BRANCH is provided by normal CI runs. It starts with the repo, i.e., origin/master
    # If we are not in CI, we fall back to the master branch
    BRANCH="${PR_TARGET_BRANCH:-${GIT_BRANCH#*/}}"
    BRANCH="${BRANCH:-master}"
    echo " -> using CI branch $BRANCH from elastic repo"
  }

  # pick_clone_target NOCOMMIT just use master for testing our feature branch. Do not merge this.....
  GH_USER="elastic"
  BRANCH="master"

  DEPTH=1
  if [ -n "${BUILD_METADATA:-}" ]; then
    # BUILD_METADATA is a ';'-separated list of key=value pairs.
    IFS=';' read -ra metadata <<< "$BUILD_METADATA"
    for kv in "${metadata[@]}"; do
      IFS='=' read -ra key_value <<< "$kv"
      if [ "${key_value[0]}" == "git_ref_elasticsearch" ]; then
        # Force checked out hash if build metadata is set. We use a depth of 100, which
        # assumes there are no more than 100 commits between head of the branch and
        # last-good-commit. This is still quite a bit faster than pulling the entire history.
        ES_REF="${key_value[1]}"
        DEPTH=100
      fi
    done
  fi

  echo " -> checking out '$BRANCH' branch from $GH_USER/elasticsearch..."
  git clone -b "$BRANCH" "https://github.com/$GH_USER/elasticsearch.git" --depth="$DEPTH"

  if [ -n "${ES_REF:-}" ]; then
    echo " -> using elasticsearch ref from build metadata: $ES_REF"
    git -C elasticsearch checkout "$ES_REF"
  else
    ES_REF="$(git -C elasticsearch rev-parse HEAD)"
  fi

  echo " -> checked out elasticsearch revision: $ES_REF"
  echo

else
  if [ -d "./elasticsearch" ]; then
    echo "Using existing 'elasticsearch' checkout"
  else
    echo "You have defined 'USE_EXISTING_ES' but no existing Elasticsearch directory exists!"
    exit 2
  fi
fi
# Back to the base directory.
cd "$BASE_DIR"

echo "Running x-pack-elasticsearch tests..."
echo "Running in $PWD"

# Echo commands as they run (-x), treat unset variables as errors (-u),
# and disable pathname expansion (-f).
set -xuf

# Clean any previous build output first.
./gradlew --stacktrace clean

# Actually run the tests
./gradlew "${GRADLE_CLI_ARGS[@]}"

# Write the ES hash we checked out to build metadata so later builds can
# pin the same revision. ES_REF is only set when we cloned elasticsearch
# ourselves; on the USE_EXISTING_ES path a bare $ES_REF would abort under
# `set -u`, so default it and only record a non-empty ref. `mkdir -p` keeps
# a pre-existing build directory from failing the script under `set -e`.
if [ -n "${ES_REF:-}" ]; then
  mkdir -p build
  echo "git_ref_elasticsearch=$ES_REF" > build/build_metadata
fi

# ~*~ shell-script-mode ~*~