diff --git a/dev-tools/build_release.py b/dev-tools/build_release.py
index 345aaa5d5dd..02620c5ed34 100644
--- a/dev-tools/build_release.py
+++ b/dev-tools/build_release.py
@@ -57,7 +57,9 @@ Once it's done it will print all the remaining steps.
  - Python 3k for script execution
  - Boto for S3 Upload ($ apt-get install python-boto)
  - RPM for RPM building ($ apt-get install rpm)
- - S3 keys exported via ENV Variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
+ - S3 keys exported via ENV variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
+ - GPG data exported via ENV variables (GPG_KEY_ID, GPG_PASSPHRASE, optionally GPG_KEYRING)
+ - S3 target repository via ENV variables (S3_BUCKET_SYNC_TO, optionally S3_BUCKET_SYNC_FROM)
 """
 env = os.environ
 
@@ -246,10 +248,13 @@ def build_release(run_tests=False, dry_run=True, cpus=1, bwc_version=None):
         print('Running Backwards compatibility tests against version [%s]' % (bwc_version))
         run_mvn('clean', 'test -Dtests.filter=@backwards -Dtests.bwc.version=%s -Dtests.bwc=true -Dtests.jvms=1' % bwc_version)
     run_mvn('clean test-compile -Dforbidden.test.signatures="org.apache.lucene.util.LuceneTestCase\$AwaitsFix @ Please fix all bugs before release"')
-    run_mvn('clean %s -DskipTests' % (target))
+    gpg_args = '-Dgpg.key="%s" -Dgpg.passphrase="%s" -Ddeb.sign=true' % (env.get('GPG_KEY_ID'), env.get('GPG_PASSPHRASE'))
+    if env.get('GPG_KEYRING'):
+        gpg_args += ' -Dgpg.keyring="%s"' % env.get('GPG_KEYRING')
+    run_mvn('clean %s -DskipTests %s' % (target, gpg_args))
     success = False
     try:
-        run_mvn('-DskipTests rpm:rpm')
+        run_mvn('-DskipTests rpm:rpm %s' % (gpg_args))
         success = True
     finally:
         if not success:
@@ -502,6 +507,14 @@ def publish_artifacts(artifacts, base='elasticsearch/elasticsearch', dry_run=Tru
             # requires boto to be installed but it is not available on python3k yet so we use a dedicated tool
             run('python %s/upload-s3.py --file %s ' % (location, os.path.abspath(artifact)))
 
+def publish_repositories(version, dry_run=True):
+    if dry_run:
+        print('Skipping package repository update')
+    else:
+        print('Triggering repository update - calling dev-tools/build_repositories.sh %s' % version)
+        # src_branch is a version like 1.5/1.6/2.0/etc., so we can pass it straight through
+        run('dev-tools/build_repositories.sh %s' % version)
+
 def print_sonatype_notice():
     settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
     if os.path.isfile(settings):
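With GPG_KEY_ID and GPG_PASSPHRASE exported, the Maven call assembled above renders to roughly the following command line (a sketch for illustration only; the key id, passphrase, and the `deploy` goal stand in for whatever `target` holds at runtime):

    export GPG_KEY_ID="D27D666CD88E42B4"    # illustrative key id
    export GPG_PASSPHRASE="secret"          # illustrative passphrase
    # 'deploy' is used here only as an example target
    mvn clean deploy -DskipTests \
        -Dgpg.key="$GPG_KEY_ID" -Dgpg.passphrase="$GPG_PASSPHRASE" -Ddeb.sign=true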
@@ -536,6 +549,16 @@ def check_s3_credentials():
     if not env.get('AWS_ACCESS_KEY_ID', None) or not env.get('AWS_SECRET_ACCESS_KEY', None):
         raise RuntimeError('Could not find "AWS_ACCESS_KEY_ID" / "AWS_SECRET_ACCESS_KEY" in the env variables please export in order to upload to S3')
 
+def check_gpg_credentials():
+    if not env.get('GPG_KEY_ID', None) or not env.get('GPG_PASSPHRASE', None):
+        raise RuntimeError('Could not find "GPG_KEY_ID" / "GPG_PASSPHRASE" in the env variables, please export them in order to sign the packages (also make sure that GPG_KEYRING is set when the keyring is not in ~/.gnupg)')
+
+def check_command_exists(name, cmd):
+    try:
+        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError:
+        raise RuntimeError('Could not run command %s - please make sure it is installed' % (name))
+
 VERSION_FILE = 'src/main/java/org/elasticsearch/Version.java'
 POM_FILE = 'pom.xml'
 
@@ -628,9 +651,16 @@ if __name__ == '__main__':
 
     if os.path.exists(LOG):
         raise RuntimeError('please remove old release log %s first' % LOG)
+
+    check_gpg_credentials()
+    check_command_exists('gpg', 'gpg --version')
+    check_command_exists('expect', 'expect -v')
     if not dry_run:
         check_s3_credentials()
+        check_command_exists('createrepo', 'createrepo --version')
+        check_command_exists('s3cmd', 's3cmd --version')
+        check_command_exists('apt-ftparchive', 'apt-ftparchive --version')
         print('WARNING: dryrun is set to "false" - this will push and publish the release')
         input('Press Enter to continue...')
 
@@ -687,6 +717,8 @@ if __name__ == '__main__':
         merge_tag_push(remote, src_branch, release_version, dry_run)
         print(' publish artifacts to S3 -- dry_run: %s' % dry_run)
         publish_artifacts(artifacts_and_checksum, dry_run=dry_run)
+        print(' Updating package repositories -- dry_run: %s' % dry_run)
+        publish_repositories(src_branch, dry_run=dry_run)
         cherry_pick_command = '.'
         if version_head_hash:
             cherry_pick_command = ' and cherry-pick the documentation changes: \'git cherry-pick %s\' to the development branch' % (version_head_hash)
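Putting the new checks together: a non-dry-run release now expects the following environment before dev-tools/build_release.py starts (a sketch; the values are placeholders taken from AWS's documented example credentials, and the script's own command line flags are omitted since this diff does not change them):

    export AWS_ACCESS_KEY_ID="AKIAIOSFODNN7EXAMPLE"
    export AWS_SECRET_ACCESS_KEY="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
    export GPG_KEY_ID="D27D666CD88E42B4"
    export GPG_PASSPHRASE="secret"
    export S3_BUCKET_SYNC_TO="packages.elasticsearch.org/elasticsearch"
    # optional: non-default keyring location and source bucket
    export GPG_KEYRING="$HOME/.gnupg"
    export S3_BUCKET_SYNC_FROM="$S3_BUCKET_SYNC_TO"
    python3 dev-tools/build_release.py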
diff --git a/dev-tools/build_repositories.sh b/dev-tools/build_repositories.sh
new file mode 100755
index 00000000000..a7c7dae9311
--- /dev/null
+++ b/dev-tools/build_repositories.sh
@@ -0,0 +1,247 @@
+#!/bin/bash
+
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on
+# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+
+# This tool uploads the debian and RPM packages to the specified S3 buckets.
+# The packages are signed as well.
+# A prerequisite is a sync of the existing repository from S3.
+
+set -e
+
+###################
+## environment variables
+##
+## required
+##
+## GPG_PASSPHRASE:        Passphrase of your GPG key
+## GPG_KEY_ID:            Key id of your GPG key
+## AWS_ACCESS_KEY_ID:     AWS access key id
+## AWS_SECRET_ACCESS_KEY: AWS secret access key
+## S3_BUCKET_SYNC_TO:     Bucket to write packages to, i.e. packages.elasticsearch.org/elasticsearch
+##
+## optional
+##
+## S3_BUCKET_SYNC_FROM:   Bucket to read packages from, defaults to packages.elasticsearch.org/elasticsearch
+## KEEP_DIRECTORIES:      Keep the generated directory structures for debugging instead of deleting them
+## GPG_KEYRING:           GPG keyring home, defaults to ~/.gnupg/
+##
+###################
+
+
+
+###################
+## configuration
+###################
+
+# No trailing slashes!
+if [ -z "$S3_BUCKET_SYNC_FROM" ] ; then
+  S3_BUCKET_SYNC_FROM="packages.elasticsearch.org/elasticsearch"
+fi
+if [ ! -z "$GPG_KEYRING" ] ; then
+  GPG_HOMEDIR="--homedir ${GPG_KEYRING}"
+fi
+
+###################
+## parameters
+###################
+
+# Must be major and minor version, i.e. 1.5 instead of 1.5.0
+version=$1
+
+###################
+## prerequisites
+###################
+
+if [ "$#" != "1" ] || [ "x$1" == "x-h" ] || [ "x$1" == "x--help" ] ; then
+  echo "Usage: $0 version"
+  echo
+  echo "  version: The elasticsearch major and minor version, i.e. 1.5"
+  exit
+fi
+
+echo "Checking for correct environment"
+
+error=""
+
+if [ -z "$GPG_PASSPHRASE" ] ; then
+  echo "Environment variable GPG_PASSPHRASE is not set"
+  error="true"
+fi
+
+if [ -z "$S3_BUCKET_SYNC_TO" ] ; then
+  echo "Environment variable S3_BUCKET_SYNC_TO is not set"
+  error="true"
+fi
+
+if [ -z "$GPG_KEY_ID" ] ; then
+  echo "Environment variable GPG_KEY_ID is not set"
+  error="true"
+fi
+
+if [ -z "$AWS_ACCESS_KEY_ID" ] ; then
+  echo "Environment variable AWS_ACCESS_KEY_ID is not set"
+  error="true"
+fi
+
+if [ -z "$AWS_SECRET_ACCESS_KEY" ] ; then
+  echo "Environment variable AWS_SECRET_ACCESS_KEY is not set"
+  error="true"
+fi
+
+if [ "x$error" == "xtrue" ] ; then
+  echo "Please set all of the above environment variables first. Exiting..."
+  exit
+fi
+
+echo "Checking for available command line tools:"
+
+check_for_command() {
+  echo -n "  $1: "
+  if [ -z "`which $1`" ]; then
+    echo "NO"
+    error="true"
+  else
+    echo "ok"
+  fi
+}
+
+error=""
+check_for_command "createrepo"
+check_for_command "s3cmd"
+check_for_command "apt-ftparchive"
+check_for_command "gpg"
+check_for_command "expect" # needed by the maven RPM plugin
+
+if [ "x$error" == "xtrue" ] ; then
+  echo "Please install all of the above tools first. Exiting..."
+ exit +fi + +################### +## setup +################### +tempdir=`mktemp -d /tmp/elasticsearch-repo.XXXX` +mkdir -p $tempdir + +# create custom s3cmd conf, in case s3cmd does not support --aws-secret-key like on ubuntu +( cat < $tempdir/.s3cmd +s3cmd="s3cmd -c $tempdir/.s3cmd" + +################### +## RPM +################### + +centosdir=$tempdir/repository/elasticsearch/$version/centos +mkdir -p $centosdir + +echo "RPM: Syncing repository for version $version into $centosdir" +$s3cmd sync s3://$S3_BUCKET_SYNC_FROM/$version/centos/ $centosdir + +rpm=target/rpm/elasticsearch/RPMS/noarch/elasticsearch*.rpm +echo "RPM: Copying $rpm into $centosdor" +cp $rpm $centosdir + +echo "RPM: Running createrepo in $centosdir" +createrepo --update $centosdir + +echo "RPM: Resigning repomd.xml" +rm -f $centosdir/repodata/repomd.xml.asc +gpg $GPG_HOMEDIR --passphrase "$GPG_PASSPHRASE" -a -b -o $centosdir/repodata/repomd.xml.asc $centosdir/repodata/repomd.xml + +echo "RPM: Syncing back repository for $version into S3 bucket $S3_BUCKET_SYNC_TO" +$s3cmd sync -P $centosdir/ s3://$S3_BUCKET_SYNC_TO/elasticsearch/$version/centos/ + +################### +## DEB +################### + +deb=target/releases/elasticsearch*.deb + +echo "DEB: Creating repository directory structure" + +if [ -z $tempdir ] ; then + echo "DEB: Could not create tempdir directory name, exiting" + exit +fi + +debbasedir=$tempdir/repository/elasticsearch/$version/debian +mkdir -p $debbasedir + + +echo "DEB: Syncing debian repository of version $version to $debbasedir" +# sync all former versions into directory +$s3cmd sync s3://$S3_BUCKET_SYNC_FROM/$version/debian/ $debbasedir + +# create directories in case of a new release so that syncing did not create this structure +mkdir -p $debbasedir/dists/stable/main/binary-all +mkdir -p $debbasedir/dists/stable/main/binary-i386 +mkdir -p $debbasedir/dists/stable/main/binary-amd64 +mkdir -p $debbasedir/.cache +mkdir -p $debbasedir/pool/main + +# create elasticsearch-1.4.conf +( cat < $tempdir/elasticsearch-$version-releases.conf + +# create packages file using apt-ftparchive +mkdir -p $debbasedir/dists/stable/main/binary-all +mkdir -p $debbasedir/pool/main/e/elasticsearch + +echo "DEB: Copying $deb to elasticsearch repo directory" +cp $deb $debbasedir/pool/main/e/elasticsearch + +echo "DEB: Creating new Packages and Release files" +cd $debbasedir +apt-ftparchive packages pool > dists/stable/main/binary-all/Packages +cat dists/stable/main/binary-all/Packages | gzip -9 > dists/stable/main/binary-all/Packages.gz +cp dists/stable/main/binary-all/Packages* dists/stable/main/binary-i386/ +cp dists/stable/main/binary-all/Packages* dists/stable/main/binary-amd64/ +apt-ftparchive -c $tempdir/elasticsearch-$version-releases.conf release $debbasedir/dists/stable/ > $debbasedir/dists/stable/Release + +echo "DEB: Signing newly created release file at $debbasedir/dists/stable/Release.gpg" +rm -f $debbasedir/dists/stable/Release.gpg +gpg $GPG_HOMEDIR --passphrase "$GPG_PASSPHRASE" -a -b -o $debbasedir/dists/stable/Release.gpg $debbasedir/dists/stable/Release + +# upload to S3 +echo "DEB: Uploading to S3 bucket to $S3_BUCKET_SYNC_TO" +$s3cmd sync -P $debbasedir/ s3://$S3_BUCKET_SYNC_TO/elasticsearch/$version/debian/ + +# back to original dir +cd - + +# delete directories unless configured otherwise +if [ -z $KEEP_DIRECTORIES ] ; then + echo "Done! Deleting repository directories at $tempdir" + rm -fr $tempdir +else + echo "Done! 
diff --git a/pom.xml b/pom.xml
index 1a0213d32d1..86dcb460825 100644
--- a/pom.xml
+++ b/pom.xml
@@ -60,7 +60,8 @@
         <packaging.elasticsearch.log.dir>/var/log/elasticsearch</packaging.elasticsearch.log.dir>
         <packaging.elasticsearch.plugins.dir>${packaging.elasticsearch.home.dir}/plugins</packaging.elasticsearch.plugins.dir>
         <packaging.elasticsearch.pid.dir>/var/run/elasticsearch</packaging.elasticsearch.pid.dir>
-
+        <deb.sign>false</deb.sign>
+        <deb.sign.method>dpkg-sig</deb.sign.method>
@@ -1081,6 +1082,11 @@
                     <artifactId>jdeb</artifactId>
+                        <signPackage>${deb.sign}</signPackage>
+                        <keyring>${gpg.keyring}</keyring>
+                        <key>${gpg.key}</key>
+                        <passphrase>${gpg.passphrase}</passphrase>
+                        <signMethod>${deb.sign.method}</signMethod>
@@ -1245,6 +1251,11 @@
                         <defaultDirmode>755</defaultDirmode>
                         <defaultUsername>root</defaultUsername>
                         <defaultGroupname>root</defaultGroupname>
+                        <keyname>${gpg.key}</keyname>
+                        <keypath>${gpg.keyring}</keypath>
+                        <keyPassphrase>
+                            <passphrase>${gpg.passphrase}</passphrase>
+                        </keyPassphrase>
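With the properties above wired into the jdeb and RPM plugins, package signing can also be exercised by hand, outside the release script (a sketch; the key id and passphrase are placeholders, and the artifact paths follow the layout used elsewhere in this diff):

    mvn clean package rpm:rpm -DskipTests -Ddeb.sign=true \
        -Dgpg.key="D27D666CD88E42B4" -Dgpg.passphrase="secret"
    # dpkg-sig is the configured deb.sign.method; verify the signature it embeds
    dpkg-sig --verify target/releases/elasticsearch-*.deb
    # check the GPG signature on the RPM
    rpm --checksig target/rpm/elasticsearch/RPMS/noarch/elasticsearch-*.rpm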