#!/bin/bash
# This script is used as a single command to run the x-pack tests.
#
# It will attempt to check out 'elasticsearch' into a sibling directory
# unless the environment variable `USE_EXISTING_ES` has a value. The
# branch of elasticsearch which will be checked out depends on
# environment variables. If running locally, set GIT_BRANCH. When
# running in Jenkins, that env var is set. When running a PR
# jenkins job, the variables PR_SOURCE_BRANCH and PR_TARGET_BRANCH
# will be set and the source branch will be looked for in elasticsearch
# before falling back to the target branch name.
#
# It will also attempt to install the appropriate version of node.js
# for the Kibana plugin tests using nvm, unless
# `xpack.kibana.build=false` is defined in
# ~/.gradle/gradle.properties. Set a custom nvm directory using the
# `NVM_DIR` environment variable.
#
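# Example invocations (illustrative only; the script file name `ci` below is an
# assumed name, not taken from this repository):
#
#   ./ci                       # run the default `check` tasks
#   ./ci packagingTest         # run packaging tests against all vagrant boxes
#   USE_EXISTING_ES=true ./ci  # reuse an existing 'elasticsearch' sibling checkout
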
# Turn on semi-strict mode
set -e
set -o pipefail

# Allow the user to choose a different set of tests through a single CLI arg;
# default to `check` if no argument has been supplied.
key=${1-check}
case $key in
  intake)
    GRADLE_CLI_ARGS=(
      "--info"
      "compileJava"
      "compileTestJava"
      "precommit"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
    )
    ;;
  packagingTest)
    GRADLE_CLI_ARGS=(
      "--info"
      "-Pvagrant.boxes=all"
      "packagingTest"
    )
    ;;
  bwcTest)
    GRADLE_CLI_ARGS=(
      "--info"
      "bwcTest"
    )
    ;;
  check)
    GRADLE_CLI_ARGS=(
      "--info"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
    )
    ;;
  smokeTestSql) # TODO remove this once we are ready to merge sql down
    GRADLE_CLI_ARGS=(
      "--info"
      "-psql"
      "check"
      ":x-pack-elasticsearch:plugin:precommit"
      ":x-pack-elasticsearch:qa:sql:check"
      ":x-pack-elasticsearch:qa:sql:multinode:check"
      ":x-pack-elasticsearch:qa:sql:no-security:check"
      ":x-pack-elasticsearch:qa:sql:security:check"
      ":x-pack-elasticsearch:qa:sql:security:no-ssl:check"
      ":x-pack-elasticsearch:qa:sql:security:ssl:check"
    )
    ;;
  releaseTest)
    GRADLE_CLI_ARGS=(
      "--info"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
      "-Dbuild.snapshot=false"
      "-Dtests.jvm.argline=-Dbuild.snapshot=false"
    )
    ;;
  jdk9)
    GRADLE_CLI_ARGS=(
      "-Pxpack.kibana.build=false"
      "--info"
      "check"
      "-Dtests.network=true"
      "-Dtests.badapples=true"
      -Dtests.jvm.argline="--add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.nio.file=ALL-UNNAMED --add-opens=java.base/java.security.cert=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/javax.net.ssl=ALL-UNNAMED"
    )
    ;;
  *)
    echo "Unsupported cli argument $1. Allowed arguments are intake, packagingTest, bwcTest, check, smokeTestSql, releaseTest, or jdk9. No argument defaults to check."
    exit 1;;
esac

SCRIPT="$0"

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
  ls=$(ls -ld "$SCRIPT")
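  # e.g. "lrwxrwxrwx 1 user user 11 Jan  1 00:00 ./ci -> ../real/ci" (paths illustrative)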
  # Drop everything prior to ->
  link=$(expr "$ls" : '.*-> \(.*\)$')
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=$(dirname "$SCRIPT")/"$link"
  fi
done

# determine base directory
BASE_DIR=$(dirname "$SCRIPT")/..

# make BASE_DIR absolute
BASE_DIR=$(cd "$BASE_DIR"; pwd)

PARENT_DIR=$(cd "$BASE_DIR"/../..; pwd)

# go to the parent directory
cd "$PARENT_DIR"

if [ -z "${USE_EXISTING_ES:+x}" ]; then
  if [ -d "./elasticsearch" ]; then
    echo "I expected a clean workspace but an 'elasticsearch' sibling directory already exists in [$PARENT_DIR]!"
    echo
    echo "Either define 'USE_EXISTING_ES' or remove the existing 'elasticsearch' sibling."
    exit 1
  fi

  function pick_clone_target {
    echo "picking which branch of elasticsearch to clone"

    # PR_* are provided by the CI git plugin for pull requests
    if [[ -n "$PR_AUTHOR" && -n "$PR_SOURCE_BRANCH" ]]; then
      GH_USER="$PR_AUTHOR"
      BRANCH="$PR_SOURCE_BRANCH"
      echo " -> using pull request author $GH_USER and branch $BRANCH"
      if [[ -n "$(git ls-remote --heads "https://github.com/$GH_USER/elasticsearch.git" "$BRANCH" 2>/dev/null)" ]]; then
        return
      fi
    fi
    GH_USER="elastic"
    # GIT_BRANCH is provided by normal CI runs. It starts with the repo, e.g. origin/master
    # If we are not in CI, we fall back to the master branch
    BRANCH="${PR_TARGET_BRANCH:-${GIT_BRANCH#*/}}"
    BRANCH="${BRANCH:-master}"
    echo " -> using CI branch $BRANCH from elastic repo"
  }

  # pick_clone_target NOCOMMIT just use master for testing our feature branch. Do not merge this.....
  GH_USER="elastic"
  BRANCH="master"

  DEPTH=1
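  # BUILD_METADATA, when set in the environment, is parsed below as a
  # semicolon-separated list of key=value pairs; the only key this script acts on
  # is git_ref_elasticsearch, e.g. "git_ref_elasticsearch=deadbeef" (illustrative value).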
  if [ -n "$BUILD_METADATA" ]; then
    IFS=';' read -ra metadata <<< "$BUILD_METADATA"
    for kv in "${metadata[@]}"; do
      IFS='=' read -ra key_value <<< "$kv"
      if [ "${key_value[0]}" == "git_ref_elasticsearch" ]; then
        # Force checked out hash if build metadata is set. We use a depth of 100, which
        # assumes there are no more than 100 commits between head of the branch and
        # last-good-commit. This is still quite a bit faster than pulling the entire history.
        ES_REF="${key_value[1]}"
        DEPTH=100
      fi
    done
  fi

echo " -> checking out '$BRANCH' branch from $GH_USER/elasticsearch..."
|
|
git clone -b $BRANCH "https://github.com/$GH_USER/elasticsearch.git" --depth=$DEPTH
|
|
|
|
if [ ! -z $ES_REF ]; then
|
|
echo " -> using elasticsearch ref from build metadata: $ES_REF"
|
|
git -C elasticsearch checkout $ES_REF
|
|
else
|
|
ES_REF="$(git -C elasticsearch rev-parse HEAD)"
|
|
fi
|
|
|
|
echo " -> checked out elasticsearch revision: $ES_REF"
|
|
echo
|
|
|
|
else
  if [ -d "./elasticsearch" ]; then
    echo "Using existing 'elasticsearch' checkout"
  else
    echo "You have defined 'USE_EXISTING_ES' but no 'elasticsearch' checkout exists!"
    exit 2
  fi
fi

# back to base directory
cd "$BASE_DIR"

echo "Running x-pack-elasticsearch tests..."
echo "Running in $PWD"

# echo commands as they run, fail on unset variables, and disable globbing
set -xuf

# clean
gradle --stacktrace clean

# Actually run the tests
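# For the default `check` argument this expands to, roughly (illustrative):
#   gradle --info check -Dtests.network=true -Dtests.badapples=true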
gradle "${GRADLE_CLI_ARGS[@]}"
|
|
|
|
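# Note: git_ref_elasticsearch is the same key this script reads from
# $BUILD_METADATA above, so the build/build_metadata file written below can
# presumably be fed back into a later run to pin the same elasticsearch commit.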
# write the ES hash we checked out to build metadata
mkdir -p build
echo "git_ref_elasticsearch=$ES_REF" > build/build_metadata

# ~*~ shell-script-mode ~*~