diff --git a/dev-tools/build_release.py b/dev-tools/build_release.py
index af3baed7e7d..8ae300d98fd 100644
--- a/dev-tools/build_release.py
+++ b/dev-tools/build_release.py
@@ -6,10 +6,10 @@
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on
+# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
@@ -19,119 +19,146 @@ import tempfile
import shutil
import os
import datetime
-import argparse
-import github3
-import smtplib
+import json
+import time
import sys
+import argparse
+import hmac
+import urllib
+import fnmatch
+import socket
+import urllib.request
+import subprocess
-from email.mime.multipart import MIMEMultipart
-from email.mime.text import MIMEText
+from functools import partial
+from http.client import HTTPConnection
+from http.client import HTTPSConnection
-from os.path import dirname, abspath
-"""
- This tool builds a release from the a given elasticsearch plugin branch.
+"""
+ This tool builds a release from a given elasticsearch branch.
In order to execute it go in the top level directory and run:
- $ python3 dev_tools/build_release.py --branch master --publish --remote origin
+ $ python3 dev_tools/build_release.py --branch 0.90 --publish --remote origin
By default this script runs in 'dry' mode which essentially simulates a release. If the
- '--publish' option is set the actual release is done.
-
- $ python3 dev_tools/build_release.py --publish --remote origin
-
- The script takes over almost all
+ '--publish' option is set the actual release is done. The script takes over almost all
steps necessary for a release from a high level point of view it does the following things:
- - run prerequisite checks
+ - run prerequisite checks i.e. check for Java 1.7 being present or S3 credentials available as env variables
- detect the version to release from the specified branch (--branch) or the current branch
- - creates a version release branch & updates pom.xml to point to a release version rather than a snapshot
- - builds the artifacts
- - commits the new version and merges the version release branch into the source branch
- - merges the master release branch into the master branch
- - creates a tag and pushes branch and master to the specified origin (--remote)
- - publishes the releases to sonatype
+ - creates a release branch & updates pom.xml and Version.java to point to a release version rather than a snapshot
+ - builds the artifacts and runs smoke-tests on the build zip & tar.gz files
+ - commits the new version and merges the release branch into the source branch
+ - creates a tag and pushes the commit to the specified origin (--remote)
+ - publishes the releases to Sonatype and S3
Once it's done it will print all the remaining steps.
Prerequisites:
- Python 3k for script execution
+ - Boto for S3 Upload ($ apt-get install python-boto)
+ - RPM for RPM building ($ apt-get install rpm)
+ - S3 keys exported via ENV variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
+ - GPG data exported via ENV variables (GPG_KEY_ID, GPG_PASSPHRASE, optionally GPG_KEYRING)
+ - S3 target repository via ENV variables (S3_BUCKET_SYNC_TO, optionally S3_BUCKET_SYNC_FROM)
"""
env = os.environ
+PLUGINS = [('license', 'elasticsearch/license/latest'),
+ ('bigdesk', 'lukas-vlcek/bigdesk'),
+ ('paramedic', 'karmi/elasticsearch-paramedic'),
+ ('segmentspy', 'polyfractal/elasticsearch-segmentspy'),
+ ('inquisitor', 'polyfractal/elasticsearch-inquisitor'),
+ ('head', 'mobz/elasticsearch-head')]
+
LOG = env.get('ES_RELEASE_LOG', '/tmp/elasticsearch_release.log')
-ROOT_DIR = abspath(os.path.join(abspath(dirname(__file__)), '../'))
-POM_FILE = ROOT_DIR + '/pom.xml'
-##########################################################
-#
-# Utility methods (log and run)
-#
-##########################################################
-# Log a message
+# console colors
+COLOR_OK = '\033[92m'
+COLOR_END = '\033[0m'
+COLOR_FAIL = '\033[91m'
+
def log(msg):
- log_plain('\n%s' % msg)
+ log_plain('\n%s' % msg)
-
-# Purge the log file
-def purge_log():
- try:
- os.remove(LOG)
- except FileNotFoundError:
- pass
-
-
-# Log a message to the LOG file
def log_plain(msg):
- f = open(LOG, mode='ab')
- f.write(msg.encode('utf-8'))
- f.close()
+ f = open(LOG, mode='ab')
+ f.write(msg.encode('utf-8'))
+ f.close()
-
-# Run a command and log it
def run(command, quiet=False):
- log('%s: RUN: %s\n' % (datetime.datetime.now(), command))
- if os.system('%s >> %s 2>&1' % (command, LOG)):
- msg = ' FAILED: %s [see log %s]' % (command, LOG)
- if not quiet:
- print(msg)
- raise RuntimeError(msg)
+ log('%s: RUN: %s\n' % (datetime.datetime.now(), command))
+ if os.system('%s >> %s 2>&1' % (command, LOG)):
+ msg = ' FAILED: %s [see log %s]' % (command, LOG)
+ if not quiet:
+ print(msg)
+ raise RuntimeError(msg)
-##########################################################
-#
-# Clean logs and check JAVA and Maven
-#
-##########################################################
try:
- purge_log()
- JAVA_HOME = env['JAVA_HOME']
+ JAVA_HOME = env['JAVA_HOME']
except KeyError:
- raise RuntimeError("""
+ raise RuntimeError("""
Please set JAVA_HOME in the env before running release tool
On OSX use: export JAVA_HOME=`/usr/libexec/java_home -v '1.7*'`""")
try:
- MVN = 'mvn'
- # make sure mvn3 is used if mvn3 is available
- # some systems use maven 2 as default
- run('mvn3 --version', quiet=True)
- MVN = 'mvn3'
-except RuntimeError:
- pass
+ JAVA_HOME = env['JAVA7_HOME']
+except KeyError:
+ pass #no JAVA7_HOME - we rely on JAVA_HOME
+try:
+ # make sure mvn3 is used if mvn3 is available
+ # some systems use maven 2 as default
+ subprocess.check_output('mvn3 --version', shell=True, stderr=subprocess.STDOUT)
+ MVN = 'mvn3'
+except subprocess.CalledProcessError:
+ MVN = 'mvn'
+
def java_exe():
- path = JAVA_HOME
- return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path)
+ path = JAVA_HOME
+ return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path)
+def verify_java_version(version):
+ s = os.popen('%s; java -version 2>&1' % java_exe()).read()
+ if ' version "%s.' % version not in s:
+ raise RuntimeError('got wrong version for java %s:\n%s' % (version, s))
+
+# Verifies the java version. We guarantee that we run with Java 1.7
+# If 1.7 is not available fail the build!
+def verify_mvn_java_version(version, mvn):
+ s = os.popen('%s; %s --version 2>&1' % (java_exe(), mvn)).read()
+ if 'Java version: %s' % version not in s:
+ raise RuntimeError('got wrong java version for %s %s:\n%s' % (mvn, version, s))
+
+# Returns the hash of the current git HEAD revision
+def get_head_hash():
+ return os.popen(' git rev-parse --verify HEAD 2>&1').read().strip()
+
+# Returns the hash of the given tag revision
+def get_tag_hash(tag):
+ return os.popen('git show-ref --tags %s --hash 2>&1' % (tag)).read().strip()
+
+# Returns the name of the current branch
+def get_current_branch():
+ return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip()
-##########################################################
-#
-# String and file manipulation utils
-#
-##########################################################
# Utility that returns the name of the release branch for a given version
-def release_branch(branchsource, version):
- return 'release_branch_%s_%s' % (branchsource, version)
+def release_branch(version):
+ return 'release_branch_%s' % version
+
+# runs git fetch on the given remote
+def fetch(remote):
+ run('git fetch %s' % remote)
+
+# Creates a new release branch from the given source branch
+# and rebases the source branch from the remote before creating
+# the release branch. Note: This fails if the source branch
+# doesn't exist on the provided remote.
+def create_release_branch(remote, src_branch, release):
+ run('git checkout %s' % src_branch)
+ run('git pull --rebase %s %s' % (remote, src_branch))
+ run('git checkout -b %s' % (release_branch(release)))
# Reads the given file and applies the
@@ -139,204 +166,367 @@ def release_branch(branchsource, version):
# a line the given file is replaced with
# the modified input.
def process_file(file_path, line_callback):
- fh, abs_path = tempfile.mkstemp()
- modified = False
- with open(abs_path, 'w', encoding='utf-8') as new_file:
- with open(file_path, encoding='utf-8') as old_file:
- for line in old_file:
- new_line = line_callback(line)
- modified = modified or (new_line != line)
- new_file.write(new_line)
- os.close(fh)
- if modified:
- #Remove original file
- os.remove(file_path)
- #Move new file
- shutil.move(abs_path, file_path)
- return True
- else:
- # nothing to do - just remove the tmp file
- os.remove(abs_path)
- return False
-
-
-# Split a version x.y.z as an array of digits [x,y,z]
-def split_version_to_digits(version):
- return list(map(int, re.findall(r'\d+', version)))
-
-
-# Guess the next snapshot version number (increment last digit)
-def guess_snapshot(version):
- digits = split_version_to_digits(version)
- source = '%s.%s.%s' % (digits[0], digits[1], digits[2])
- destination = '%s.%s.%s' % (digits[0], digits[1], digits[2] + 1)
- return version.replace(source, destination)
-
-
-# Guess the anchor in generated documentation
-# Looks like this "#version-230-for-elasticsearch-13"
-def get_doc_anchor(release, esversion):
- plugin_digits = split_version_to_digits(release)
- es_digits = split_version_to_digits(esversion)
- return '#version-%s%s%s-for-elasticsearch-%s%s' % (
- plugin_digits[0], plugin_digits[1], plugin_digits[2], es_digits[0], es_digits[1])
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with open(abs_path,'w', encoding='utf-8') as new_file:
+ with open(file_path, encoding='utf-8') as old_file:
+ for line in old_file:
+ new_line = line_callback(line)
+ modified = modified or (new_line != line)
+ new_file.write(new_line)
+ os.close(fh)
+ if modified:
+ #Remove original file
+ os.remove(file_path)
+ #Move new file
+ shutil.move(abs_path, file_path)
+ return True
+ else:
+ # nothing to do - just remove the tmp file
+ os.remove(abs_path)
+ return False
+# Walks the given directory path (defaults to 'docs')
+# and replaces all 'coming[$version]' tags with
+# 'added[$version]'. This method only accesses asciidoc files.
+def update_reference_docs(release_version, path='docs'):
+ pattern = 'coming[%s' % (release_version)
+ replacement = 'added[%s' % (release_version)
+ pending_files = []
+ def callback(line):
+ return line.replace(pattern, replacement)
+ for root, _, file_names in os.walk(path):
+ for file_name in fnmatch.filter(file_names, '*.asciidoc'):
+ full_path = os.path.join(root, file_name)
+ if process_file(full_path, callback):
+ pending_files.append(os.path.join(root, file_name))
+ return pending_files
# Moves the pom.xml file from a snapshot to a release
def remove_maven_snapshot(pom, release):
- pattern = '%s-SNAPSHOT' % release
- replacement = '%s' % release
+ pattern = '%s-SNAPSHOT' % (release)
+ replacement = '%s' % (release)
+ def callback(line):
+ return line.replace(pattern, replacement)
+ process_file(pom, callback)
- def callback(line):
- return line.replace(pattern, replacement)
+# Moves the Version.java file from a snapshot to a release
+def remove_version_snapshot(version_file, release):
+ # 1.0.0.Beta1 -> 1_0_0_Beta1
+ release = release.replace('.', '_')
+ pattern = 'new Version(V_%s_ID, true' % (release)
+ replacement = 'new Version(V_%s_ID, false' % (release)
+ def callback(line):
+ return line.replace(pattern, replacement)
+ process_file(version_file, callback)
- process_file(pom, callback)
+# Stages the given files for the next git commit
+def add_pending_files(*files):
+ for file in files:
+ run('git add %s' % (file))
+# Executes a git commit with 'release [version]' as the commit message
+def commit_release(release):
+ run('git commit -m "release [%s]"' % release)
-# Moves the pom.xml file to the next snapshot
-def add_maven_snapshot(pom, release, snapshot):
- pattern = '%s' % release
- replacement = '%s-SNAPSHOT' % snapshot
+def commit_feature_flags(release):
+ run('git commit -m "Update Documentation Feature Flags [%s]"' % release)
- def callback(line):
- return line.replace(pattern, replacement)
+def tag_release(release):
+ run('git tag -a v%s -m "Tag release version %s"' % (release, release))
- process_file(pom, callback)
+def run_mvn(*cmd):
+ for c in cmd:
+ run('%s; %s %s' % (java_exe(), MVN, c))
+def build_release(run_tests=False, dry_run=True, cpus=1, bwc_version=None):
+ target = 'deploy'
+ if dry_run:
+ target = 'package'
+ if run_tests:
+ run_mvn('clean',
+ 'test -Dtests.jvms=%s -Des.node.mode=local' % (cpus),
+ 'test -Dtests.jvms=%s -Des.node.mode=network' % (cpus))
+ if bwc_version:
+ print('Running Backwards compatibility tests against version [%s]' % (bwc_version))
+ run_mvn('clean', 'test -Dtests.filter=@backwards -Dtests.bwc.version=%s -Dtests.bwc=true -Dtests.jvms=1' % bwc_version)
+ run_mvn('clean test-compile -Dforbidden.test.signatures="org.apache.lucene.util.LuceneTestCase\$AwaitsFix @ Please fix all bugs before release"')
+ gpg_args = '-Dgpg.key="%s" -Dgpg.passphrase="%s" -Ddeb.sign=true' % (env.get('GPG_KEY_ID'), env.get('GPG_PASSPHRASE'))
+ if env.get('GPG_KEYRING'):
+ gpg_args += ' -Dgpg.keyring="%s"' % env.get('GPG_KEYRING')
+ run_mvn('clean %s -DskipTests %s' % (target, gpg_args))
+ success = False
+ try:
+ run_mvn('-DskipTests rpm:rpm %s' % (gpg_args))
+ success = True
+ finally:
+ if not success:
+ print("""
+ RPM Bulding failed make sure "rpm" tools are installed.
+ Use on of the following commands to install:
+ $ brew install rpm # on OSX
+ $ apt-get install rpm # on Ubuntu et.al
+ """)
-# Checks the pom.xml for the release version. 2.0.0-SNAPSHOT
+# Uses the github API to fetch open tickets for the given release version
+# if it finds any tickets open for that version it will throw an exception
+def ensure_no_open_tickets(version):
+ version = "v%s" % version
+ conn = HTTPSConnection('api.github.com')
+ try:
+ log('Checking for open tickets on Github for version %s' % version)
+ log('Check if node is available')
+ conn.request('GET', '/repos/elastic/elasticsearch/issues?state=open&labels=%s' % version, headers= {'User-Agent' : 'Elasticsearch version checker'})
+ res = conn.getresponse()
+ if res.status == 200:
+ issues = json.loads(res.read().decode("utf-8"))
+ if issues:
+ urls = []
+ for issue in issues:
+ urls.append(issue['html_url'])
+ raise RuntimeError('Found open issues for release version %s:\n%s' % (version, '\n'.join(urls)))
+ else:
+ log("No open issues found for version %s" % version)
+ else:
+ raise RuntimeError('Failed to fetch issue list from Github for release version %s' % version)
+ except socket.error as e:
+ log("Failed to fetch issue list from Github for release version %s' % version - Exception: [%s]" % (version, e))
+ #that is ok it might not be there yet
+ finally:
+ conn.close()
+
+def wait_for_node_startup(host='127.0.0.1', port=9200,timeout=15):
+ for _ in range(timeout):
+ conn = HTTPConnection(host, port, timeout)
+ try:
+ log('Waiting until node becomes available for 1 second')
+ time.sleep(1)
+ log('Check if node is available')
+ conn.request('GET', '')
+ res = conn.getresponse()
+ if res.status == 200:
+ return True
+ except socket.error as e:
+ log("Failed while waiting for node - Exception: [%s]" % e)
+ #that is ok it might not be there yet
+ finally:
+ conn.close()
+
+ return False
+
+# Ensures we are using a true Lucene release, not a snapshot build:
+def verify_lucene_version():
+ s = open('pom.xml', encoding='utf-8').read()
+ if 'download.elastic.co/lucenesnapshots' in s:
+ raise RuntimeError('pom.xml contains download.elastic.co/lucenesnapshots repository: remove that before releasing')
+
+ m = re.search(r'<lucene.version>(.*?)</lucene.version>', s)
+ if m is None:
+ raise RuntimeError('unable to locate lucene.version in pom.xml')
+ lucene_version = m.group(1)
+
+ m = re.search(r'<lucene.maven.version>(.*?)</lucene.maven.version>', s)
+ if m is None:
+ raise RuntimeError('unable to locate lucene.maven.version in pom.xml')
+ lucene_maven_version = m.group(1)
+ if lucene_version != lucene_maven_version:
+ raise RuntimeError('pom.xml is still using a snapshot release of lucene (%s): cutover to a real lucene release before releasing' % lucene_maven_version)
+
+# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set ie.
# if the version is already on a release version we fail.
# Returns the next version string ie. 0.90.7
def find_release_version(src_branch):
- git_checkout(src_branch)
- with open(POM_FILE, encoding='utf-8') as file:
- for line in file:
- match = re.search(r'(.+)-SNAPSHOT', line)
- if match:
- return match.group(1)
- raise RuntimeError('Could not find release version in branch %s' % src_branch)
+ run('git checkout %s' % src_branch)
+ with open('pom.xml', encoding='utf-8') as file:
+ for line in file:
+ match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
+ if match:
+ return match.group(1)
+ raise RuntimeError('Could not find release version in branch %s' % src_branch)
+def artifact_names(release, path = ''):
+ return [os.path.join(path, 'elasticsearch-%s.%s' % (release, t)) for t in ['deb', 'tar.gz', 'zip']]
-# extract a value from pom.xml
-def find_from_pom(tag):
- with open(POM_FILE, encoding='utf-8') as file:
- for line in file:
- match = re.search(r'<%s>(.+)%s>' % (tag, tag), line)
- if match:
- return match.group(1)
- raise RuntimeError('Could not find <%s> in pom.xml file' % (tag))
+def get_artifacts(release):
+ common_artifacts = artifact_names(release, 'target/releases/')
+ for f in common_artifacts:
+ if not os.path.isfile(f):
+ raise RuntimeError('Could not find required artifact at %s' % f)
+ rpm = os.path.join('target/rpm/elasticsearch/RPMS/noarch/', 'elasticsearch-%s-1.noarch.rpm' % release)
+ if os.path.isfile(rpm):
+ log('RPM [%s] contains: ' % rpm)
+ run('rpm -pqli %s' % rpm)
+ # this is an oddity of RPM that it attaches -1, so we have to rename it
+ renamed_rpm = os.path.join('target/rpm/elasticsearch/RPMS/noarch/', 'elasticsearch-%s.noarch.rpm' % release)
+ shutil.move(rpm, renamed_rpm)
+ common_artifacts.append(renamed_rpm)
+ else:
+ raise RuntimeError('Could not find required artifact at %s' % rpm)
+ return common_artifacts
+# Checks the jar files in each package
+# Barfs if any of the package jar files differ
+def check_artifacts_for_same_jars(artifacts):
+ jars = []
+ for file in artifacts:
+ if file.endswith('.zip'):
+ jars.append(subprocess.check_output("unzip -l %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True))
+ if file.endswith('.tar.gz'):
+ jars.append(subprocess.check_output("tar tzvf %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True))
+ if file.endswith('.rpm'):
+ jars.append(subprocess.check_output("rpm -pqli %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True))
+ if file.endswith('.deb'):
+ jars.append(subprocess.check_output("dpkg -c %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True))
+ if len(set(jars)) != 1:
+ raise RuntimeError('JAR contents of packages are not the same, please check the package contents. Use [unzip -l], [tar tzvf], [dpkg -c], [rpm -pqli] to inspect')
-##########################################################
-#
-# GIT commands
-#
-##########################################################
-# Returns the hash of the current git HEAD revision
-def get_head_hash():
- return os.popen('git rev-parse --verify HEAD 2>&1').read().strip()
+# Generates sha1 checksums for all files
+# and returns the checksum files as well
+# as the given files in a list
+def generate_checksums(files):
+ res = []
+ for release_file in files:
+ directory = os.path.dirname(release_file)
+ file = os.path.basename(release_file)
+ checksum_file = '%s.sha1.txt' % file
+ if os.system('cd %s; shasum %s > %s' % (directory, file, checksum_file)):
+ raise RuntimeError('Failed to generate checksum for file %s' % release_file)
+ res = res + [os.path.join(directory, checksum_file), release_file]
+ return res
-# Returns the name of the current branch
-def get_current_branch():
- return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip()
-
-
-# runs get fetch on the given remote
-def fetch(remote):
- run('git fetch %s' % remote)
-
-
-# Creates a new release branch from the given source branch
-# and rebases the source branch from the remote before creating
-# the release branch. Note: This fails if the source branch
-# doesn't exist on the provided remote.
-def create_release_branch(remote, src_branch, release):
- git_checkout(src_branch)
- run('git pull --rebase %s %s' % (remote, src_branch))
- run('git checkout -b %s' % (release_branch(src_branch, release)))
-
-
-# Stages the given files for the next git commit
-def add_pending_files(*files):
+def download_and_verify(release, files, plugins=None, base_url='https://download.elastic.co/elasticsearch/elasticsearch'):
+ print('Downloading and verifying release %s from %s' % (release, base_url))
+ tmp_dir = tempfile.mkdtemp()
+ try:
+ downloaded_files = []
for file in files:
- run('git add %s' % file)
+ name = os.path.basename(file)
+ url = '%s/%s' % (base_url, name)
+ abs_file_path = os.path.join(tmp_dir, name)
+ print(' Downloading %s' % (url))
+ downloaded_files.append(abs_file_path)
+ urllib.request.urlretrieve(url, abs_file_path)
+ url = ''.join([url, '.sha1.txt'])
+ checksum_file = os.path.join(tmp_dir, ''.join([abs_file_path, '.sha1.txt']))
+ urllib.request.urlretrieve(url, checksum_file)
+ print(' Verifying checksum %s' % (checksum_file))
+ run('cd %s && sha1sum -c %s' % (tmp_dir, os.path.basename(checksum_file)))
+ smoke_test_release(release, downloaded_files, get_tag_hash('v%s' % release), plugins)
+ print(' SUCCESS')
+ finally:
+ shutil.rmtree(tmp_dir)
-
-# Executes a git commit with 'release [version]' as the commit message
-def commit_release(artifact_id, release):
- run('git commit -m "prepare release %s-%s"' % (artifact_id, release))
-
-
-# Commit documentation changes on the master branch
-def commit_master(release):
- run('git commit -m "update documentation with release %s"' % release)
-
-
-# Commit next snapshot files
-def commit_snapshot():
- run('git commit -m "prepare for next development iteration"')
-
-
-# Put the version tag on on the current commit
-def tag_release(release):
- run('git tag -a v%s -m "Tag release version %s"' % (release, release))
-
-
-# Checkout a given branch
-def git_checkout(branch):
- run('git checkout %s' % branch)
-
-
-# Merge the release branch with the actual branch
-def git_merge(src_branch, release_version):
- git_checkout(src_branch)
- run('git merge %s' % release_branch(src_branch, release_version))
-
-
-# Push the actual branch and master branch
-def git_push(remote, src_branch, release_version, dry_run):
- if not dry_run:
- run('git push %s %s master' % (remote, src_branch)) # push the commit and the master
- run('git push %s v%s' % (remote, release_version)) # push the tag
+def smoke_test_release(release, files, expected_hash, plugins):
+ for release_file in files:
+ if not os.path.isfile(release_file):
+ raise RuntimeError('Smoketest failed missing file %s' % (release_file))
+ tmp_dir = tempfile.mkdtemp()
+ if release_file.endswith('tar.gz'):
+ run('tar -xzf %s -C %s' % (release_file, tmp_dir))
+ elif release_file.endswith('zip'):
+ run('unzip %s -d %s' % (release_file, tmp_dir))
else:
- print(' dryrun [True] -- skipping push to remote %s %s master' % (remote, src_branch))
+ log('Skip SmokeTest for [%s]' % release_file)
+ continue # nothing to do here
+ es_run_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/elasticsearch')
+ print(' Smoke testing package [%s]' % release_file)
+ es_plugin_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release),'bin/plugin')
+ plugin_names = {}
+ for name, plugin in plugins:
+ print(' Install plugin [%s] from [%s]' % (name, plugin))
+ run('%s; %s %s %s' % (java_exe(), es_plugin_path, '-install', plugin))
+ plugin_names[name] = True
+ if release.startswith("0.90."):
+ background = '' # 0.90.x starts in background automatically
+ else:
+ background = '-d'
+ print(' Starting elasticsearch deamon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release))
+ run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.discovery.zen.ping.multicast.enabled=false -Des.script.inline=on -Des.script.indexed=on %s'
+ % (java_exe(), es_run_path, background))
+ conn = HTTPConnection('127.0.0.1', 9200, 20);
+ wait_for_node_startup()
+ try:
+ try:
+ conn.request('GET', '')
+ res = conn.getresponse()
+ if res.status == 200:
+ version = json.loads(res.read().decode("utf-8"))['version']
+ if release != version['number']:
+ raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
+ if version['build_snapshot']:
+ raise RuntimeError('Expected non snapshot version')
+ if version['build_hash'].strip() != expected_hash:
+ raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash']))
+ print(' Running REST Spec tests against package [%s]' % release_file)
+ run_mvn('test -Dtests.cluster=%s -Dtests.class=*.*RestTests' % ("127.0.0.1:9300"))
+ print(' Verify if plugins are listed in _nodes')
+ conn.request('GET', '/_nodes?plugin=true&pretty=true')
+ res = conn.getresponse()
+ if res.status == 200:
+ nodes = json.loads(res.read().decode("utf-8"))['nodes']
+ for _, node in nodes.items():
+ node_plugins = node['plugins']
+ for node_plugin in node_plugins:
+ if not plugin_names.get(node_plugin['name'], False):
+ raise RuntimeError('Unexpeced plugin %s' % node_plugin['name'])
+ del plugin_names[node_plugin['name']]
+ if plugin_names:
+ raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys()))
-##########################################################
-#
-# Maven commands
-#
-##########################################################
-# Run a given maven command
-def run_mvn(*cmd):
- for c in cmd:
- run('%s; %s -f %s %s' % (java_exe(), MVN, POM_FILE, c))
+ else:
+ raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
+ else:
+ raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
+ finally:
+ conn.request('POST', '/_cluster/nodes/_local/_shutdown')
+ time.sleep(1) # give the node some time to shut down
+ if conn.getresponse().status != 200:
+ raise RuntimeError('Expected HTTP 200 but got %s on node shutdown' % res.status)
+ finally:
+ conn.close()
+ shutil.rmtree(tmp_dir)
-# Run deploy or package depending on dry_run
-# Default to run mvn package
-# When run_tests=True a first mvn clean test is run
-def build_release(run_tests=False, dry_run=True):
- target = 'deploy'
- tests = '-DskipTests'
- if run_tests:
- tests = ''
+def merge_tag_push(remote, src_branch, release_version, dry_run):
+ run('git checkout %s' % src_branch)
+ run('git merge %s' % release_branch(release_version))
+ run('git tag v%s' % release_version)
+ if not dry_run:
+ run('git push %s %s' % (remote, src_branch)) # push the commit
+ run('git push %s v%s' % (remote, release_version)) # push the tag
+ else:
+ print(' dryrun [True] -- skipping push to remote %s' % remote)
+
+def publish_artifacts(artifacts, base='elasticsearch/elasticsearch', dry_run=True):
+ location = os.path.dirname(os.path.realpath(__file__))
+ for artifact in artifacts:
if dry_run:
- target = 'package'
- run_mvn('clean %s %s' % (target, tests))
+ print('Skip Uploading %s to Amazon S3' % artifact)
+ else:
+ print('Uploading %s to Amazon S3' % artifact)
+ # requires boto to be installed but it is not available on python3k yet so we use a dedicated tool
+ run('python %s/upload-s3.py --file %s ' % (location, os.path.abspath(artifact)))
+def publish_repositories(version, dry_run=True):
+ if dry_run:
+ print('Skipping package repository update')
+ else:
+ print('Triggering repository update - calling dev-tools/build_repositories.sh %s' % version)
+ # src_branch is a version like 1.5/1.6/2.0/etc.. so we can use this
+ run('dev-tools/build_repositories.sh %s' % src_branch)
def print_sonatype_notice():
- settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
- if os.path.isfile(settings):
- with open(settings, encoding='utf-8') as settings_file:
- for line in settings_file:
- if line.strip() == 'sonatype-nexus-snapshots':
- # moving out - we found the indicator no need to print the warning
- return
- print("""
+ settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
+ if os.path.isfile(settings):
+ with open(settings, encoding='utf-8') as settings_file:
+ for line in settings_file:
+ if line.strip() == '<id>sonatype-nexus-snapshots</id>':
+ # moving out - we found the indicator no need to print the warning
+ return
+ print("""
NOTE: No sonatype settings detected, make sure you have configured
your sonatype credentials in '~/.m2/settings.xml':
@@ -358,146 +548,256 @@ def print_sonatype_notice():
""")
+def check_command_exists(name, cmd):
+ try:
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ raise RuntimeError('Could not run command %s - please make sure it is installed' % (name))
-# we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml
-print_sonatype_notice()
+VERSION_FILE = 'src/main/java/org/elasticsearch/Version.java'
+POM_FILE = 'pom.xml'
+
+# finds the highest available bwc version to test against
+def find_bwc_version(release_version, bwc_dir='backwards'):
+ log(' Lookup bwc version in directory [%s]' % bwc_dir)
+ bwc_version = None
+ if os.path.exists(bwc_dir) and os.path.isdir(bwc_dir):
+ max_version = [int(x) for x in release_version.split('.')]
+ for dir in os.listdir(bwc_dir):
+ if os.path.isdir(os.path.join(bwc_dir, dir)) and dir.startswith('elasticsearch-'):
+ version = [int(x) for x in dir[len('elasticsearch-'):].split('.')]
+ if version < max_version: # bwc tests only against smaller versions
+ if (not bwc_version) or version > [int(x) for x in bwc_version.split('.')]:
+ bwc_version = dir[len('elasticsearch-'):]
+ log(' Using bwc version [%s]' % bwc_version)
+ else:
+ log(' bwc directory [%s] does not exists or is not a directory - skipping' % bwc_dir)
+ return bwc_version
+
+def ensure_checkout_is_clean(branchName):
+ # Make sure no local mods:
+ s = subprocess.check_output('git diff --shortstat', shell=True)
+ if len(s) > 0:
+ raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)
+
+ # Make sure no untracked files:
+ s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
+ if 'Untracked files:' in s:
+ raise RuntimeError('git status shows untracked files: got:\n%s' % s)
+
+ # Make sure we are on the right branch (NOTE: a bit weak, since we default to current branch):
+ if 'On branch %s' % branchName not in s:
+ raise RuntimeError('git status does not show branch %s: got:\n%s' % (branchName, s))
+
+ # Make sure we have all changes from origin:
+ if 'is behind' in s:
+ raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin %s": got:\n%s' % (branchName, s))
+
+  # Make sure we have no local unpushed changes (this is supposed to be a clean area):
+ if 'is ahead' in s:
+ raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout %s", "git reset --hard origin/%s": got:\n%s' % (branchName, branchName, s))
+
+# Checks all source files for //NORELEASE comments
+def check_norelease(path='src'):
+ pattern = re.compile(r'\bnorelease\b', re.IGNORECASE)
+ for root, _, file_names in os.walk(path):
+ for file_name in fnmatch.filter(file_names, '*.java'):
+ full_path = os.path.join(root, file_name)
+ line_number = 0
+ with open(full_path, 'r', encoding='utf-8') as current_file:
+ for line in current_file:
+ line_number = line_number + 1
+ if pattern.search(line):
+ raise RuntimeError('Found //norelease comment in %s line %s' % (full_path, line_number))
+
+def run_and_print(text, run_function):
+ try:
+ print(text, end='')
+ run_function()
+ print(COLOR_OK + 'OK' + COLOR_END)
+ return True
+ except RuntimeError:
+ print(COLOR_FAIL + 'NOT OK' + COLOR_END)
+ return False
+
+def check_env_var(text, env_var):
+ try:
+ print(text, end='')
+ env[env_var]
+ print(COLOR_OK + 'OK' + COLOR_END)
+ return True
+ except KeyError:
+ print(COLOR_FAIL + 'NOT OK' + COLOR_END)
+ return False
+
+def check_environment_and_commandline_tools(check_only):
+ checks = list()
+ checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_ACCESS_KEY_ID... ', 'AWS_SECRET_ACCESS_KEY'))
+ checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY_ID... ', 'AWS_ACCESS_KEY_ID'))
+ checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_USERNAME... ', 'SONATYPE_USERNAME'))
+ checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_PASSWORD... ', 'SONATYPE_PASSWORD'))
+ checks.append(check_env_var('Checking for GPG env configuration GPG_KEY_ID... ', 'GPG_KEY_ID'))
+ checks.append(check_env_var('Checking for GPG env configuration GPG_PASSPHRASE... ', 'GPG_PASSPHRASE'))
+ checks.append(check_env_var('Checking for S3 repo upload env configuration S3_BUCKET_SYNC_TO... ', 'S3_BUCKET_SYNC_TO'))
+ checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_NAME... ', 'GIT_AUTHOR_NAME'))
+ checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_EMAIL... ', 'GIT_AUTHOR_EMAIL'))
+
+ checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version')))
+ checks.append(run_and_print('Checking command: dpkg... ', partial(check_command_exists, 'dpkg', 'dpkg --version')))
+ checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version')))
+ checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v')))
+ checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version')))
+ checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version')))
+ checks.append(run_and_print('Checking command: apt-ftparchive... ', partial(check_command_exists, 'apt-ftparchive', 'apt-ftparchive --version')))
+
+ # boto, check error code being returned
+ location = os.path.dirname(os.path.realpath(__file__))
+ command = 'python %s/upload-s3.py -h' % (location)
+ checks.append(run_and_print('Testing boto python dependency... ', partial(check_command_exists, 'python-boto', command)))
+
+ checks.append(run_and_print('Checking java version... ', partial(verify_java_version, '1.7')))
+ checks.append(run_and_print('Checking java mvn version... ', partial(verify_mvn_java_version, '1.7', MVN)))
+
+ if check_only:
+ sys.exit(0)
+
+ if False in checks:
+ print("Exiting due to failing checks")
+ sys.exit(0)
if __name__ == '__main__':
- parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Plugin Release')
- parser.add_argument('--branch', '-b', metavar='master', default=get_current_branch(),
- help='The branch to release from. Defaults to the current branch.')
- parser.add_argument('--skiptests', '-t', dest='tests', action='store_false',
- help='Skips tests before release. Tests are run by default.')
- parser.set_defaults(tests=True)
- parser.add_argument('--remote', '-r', metavar='origin', default='origin',
- help='The remote to push the release commit and tag to. Default is [origin]')
- parser.add_argument('--publish', '-p', dest='dryrun', action='store_false',
- help='Publishes the release. Disable by default.')
- parser.add_argument('--disable_mail', '-dm', dest='mail', action='store_false',
- help='Do not send a release email. Email is sent by default.')
+ parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release')
+ parser.add_argument('--branch', '-b', metavar='RELEASE_BRANCH', default=get_current_branch(),
+ help='The branch to release from. Defaults to the current branch.')
+ parser.add_argument('--cpus', '-c', metavar='1', default=1,
+ help='The number of cpus to use for running the test. Default is [1]')
+ parser.add_argument('--skiptests', '-t', dest='tests', action='store_false',
+ help='Skips tests before release. Tests are run by default.')
+ parser.set_defaults(tests=True)
+ parser.add_argument('--remote', '-r', metavar='origin', default='origin',
+ help='The remote to push the release commit and tag to. Default is [origin]')
+ parser.add_argument('--publish', '-d', dest='dryrun', action='store_false',
+ help='Publishes the release. Disable by default.')
+ parser.add_argument('--smoke', '-s', dest='smoke', default='',
+ help='Smoke tests the given release')
+ parser.add_argument('--bwc', '-w', dest='bwc', metavar='backwards', default='backwards',
+ help='Backwards compatibility version path to use to run compatibility tests against')
+ parser.add_argument('--check-only', dest='check_only', action='store_true',
+ help='Checks and reports for all requirements and then exits')
- parser.set_defaults(dryrun=True)
- parser.set_defaults(mail=True)
- args = parser.parse_args()
+ parser.set_defaults(dryrun=True)
+ parser.set_defaults(smoke=None)
+ parser.set_defaults(check_only=False)
+ args = parser.parse_args()
+ bwc_path = args.bwc
+ src_branch = args.branch
+ remote = args.remote
+ run_tests = args.tests
+ dry_run = args.dryrun
+ cpus = args.cpus
+ build = not args.smoke
+ smoke_test_version = args.smoke
- src_branch = args.branch
- remote = args.remote
- run_tests = args.tests
- dry_run = args.dryrun
- mail = args.mail
+ check_environment_and_commandline_tools(args.check_only)
- if src_branch == 'master':
- raise RuntimeError('Can not release the master branch. You need to create another branch before a release')
+ # we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml
+ print_sonatype_notice()
- if not dry_run:
- print('WARNING: dryrun is set to "false" - this will push and publish the release')
- input('Press Enter to continue...')
+ # we require to build with 1.7
+ verify_java_version('1.7')
+ verify_mvn_java_version('1.7', MVN)
- print(''.join(['-' for _ in range(80)]))
- print('Preparing Release from branch [%s] running tests: [%s] dryrun: [%s]' % (src_branch, run_tests, dry_run))
- print(' JAVA_HOME is [%s]' % JAVA_HOME)
- print(' Running with maven command: [%s] ' % (MVN))
+ if os.path.exists(LOG):
+ raise RuntimeError('please remove old release log %s first' % LOG)
+ if not dry_run:
+ print('WARNING: dryrun is set to "false" - this will push and publish the release')
+ input('Press Enter to continue...')
+
+ print(''.join(['-' for _ in range(80)]))
+ print('Preparing Release from branch [%s] running tests: [%s] dryrun: [%s]' % (src_branch, run_tests, dry_run))
+ print(' JAVA_HOME is [%s]' % JAVA_HOME)
+ print(' Running with maven command: [%s] ' % (MVN))
+ if build:
+ check_norelease(path='src')
+ ensure_checkout_is_clean(src_branch)
+ verify_lucene_version()
release_version = find_release_version(src_branch)
- artifact_id = find_from_pom('artifactId')
- artifact_name = find_from_pom('name')
- artifact_description = find_from_pom('description')
- elasticsearch_version = find_from_pom('elasticsearch.version')
- print(' Artifact Id: [%s]' % artifact_id)
- print(' Release version: [%s]' % release_version)
- print(' Elasticsearch: [%s]' % elasticsearch_version)
- if elasticsearch_version.find('-SNAPSHOT') != -1:
- raise RuntimeError('Can not release with a SNAPSHOT elasticsearch dependency: %s' % elasticsearch_version)
-
- # extract snapshot
- default_snapshot_version = guess_snapshot(release_version)
- snapshot_version = input('Enter next snapshot version [%s]:' % default_snapshot_version)
- snapshot_version = snapshot_version or default_snapshot_version
-
- print(' Next version: [%s-SNAPSHOT]' % snapshot_version)
- print(' Artifact Name: [%s]' % artifact_name)
- print(' Artifact Description: [%s]' % artifact_description)
-
+ ensure_no_open_tickets(release_version)
if not dry_run:
- smoke_test_version = release_version
-
- try:
- git_checkout(src_branch)
- version_hash = get_head_hash()
- run_mvn('clean') # clean the env!
- create_release_branch(remote, src_branch, release_version)
- print(' Created release branch [%s]' % (release_branch(src_branch, release_version)))
- except RuntimeError:
- print('Logs:')
- with open(LOG, 'r') as log_file:
- print(log_file.read())
- sys.exit(-1)
-
+ smoke_test_version = release_version
+ head_hash = get_head_hash()
+ run_mvn('clean') # clean the env!
+ print(' Release version: [%s]' % release_version)
+ create_release_branch(remote, src_branch, release_version)
+ print(' Created release branch [%s]' % (release_branch(release_version)))
success = False
try:
- ########################################
- # Start update process in version branch
- ########################################
- pending_files = [POM_FILE]
- remove_maven_snapshot(POM_FILE, release_version)
- print(' Done removing snapshot version')
- add_pending_files(*pending_files) # expects var args use * to expand
- commit_release(artifact_id, release_version)
- print(' Committed release version [%s]' % release_version)
- print(''.join(['-' for _ in range(80)]))
- print('Building Release candidate')
- input('Press Enter to continue...')
- if not dry_run:
- print(' Running maven builds now and publish to sonatype - run-tests [%s]' % run_tests)
- else:
- print(' Running maven builds now run-tests [%s]' % run_tests)
- build_release(run_tests=run_tests, dry_run=dry_run)
-
- print(''.join(['-' for _ in range(80)]))
-
- print('Finish Release -- dry_run: %s' % dry_run)
- input('Press Enter to continue...')
-
- print(' merge release branch')
- git_merge(src_branch, release_version)
- print(' tag')
- tag_release(release_version)
-
- add_maven_snapshot(POM_FILE, release_version, snapshot_version)
- add_pending_files(*pending_files)
- commit_snapshot()
-
- print(' push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run))
- git_push(remote, src_branch, release_version, dry_run)
-
- pending_msg = """
-Release successful pending steps:
- * close and release sonatype repo: https://oss.sonatype.org/
- * check if the release is there https://oss.sonatype.org/content/repositories/releases/org/elasticsearch/%(artifact_id)s/%(version)s
-"""
- print(pending_msg % {'version': release_version,
- 'artifact_id': artifact_id})
- success = True
+ pending_files = [POM_FILE, VERSION_FILE]
+ remove_maven_snapshot(POM_FILE, release_version)
+ remove_version_snapshot(VERSION_FILE, release_version)
+ print(' Done removing snapshot version')
+ add_pending_files(*pending_files) # expects var args use * to expand
+ commit_release(release_version)
+ pending_files = update_reference_docs(release_version)
+ version_head_hash = None
+ # split commits for docs and version to enable easy cherry-picking
+ if pending_files:
+ add_pending_files(*pending_files) # expects var args use * to expand
+ commit_feature_flags(release_version)
+ version_head_hash = get_head_hash()
+ print(' Committed release version [%s]' % release_version)
+ print(''.join(['-' for _ in range(80)]))
+ print('Building Release candidate')
+ input('Press Enter to continue...')
+ if not dry_run:
+ print(' Running maven builds now and publish to Sonatype - run-tests [%s]' % run_tests)
+ else:
+ print(' Running maven builds now run-tests [%s]' % run_tests)
+ build_release(run_tests=run_tests, dry_run=dry_run, cpus=cpus, bwc_version=find_bwc_version(release_version, bwc_path))
+ artifacts = get_artifacts(release_version)
+ print('Checking if all artifacts contain the same jars')
+ check_artifacts_for_same_jars(artifacts)
+ artifacts_and_checksum = generate_checksums(artifacts)
+ smoke_test_release(release_version, artifacts, get_head_hash(), PLUGINS)
+ print(''.join(['-' for _ in range(80)]))
+ print('Finish Release -- dry_run: %s' % dry_run)
+ input('Press Enter to continue...')
+ print(' merge release branch, tag and push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run))
+ merge_tag_push(remote, src_branch, release_version, dry_run)
+ print(' publish artifacts to S3 -- dry_run: %s' % dry_run)
+ publish_artifacts(artifacts_and_checksum, dry_run=dry_run)
+ print(' Updating package repositories -- dry_run: %s' % dry_run)
+ publish_repositories(src_branch, dry_run=dry_run)
+ cherry_pick_command = '.'
+ if version_head_hash:
+ cherry_pick_command = ' and cherry-pick the documentation changes: \'git cherry-pick %s\' to the development branch' % (version_head_hash)
+ pending_msg = """
+ Release successful pending steps:
+ * create a new vX.Y.Z label on github for the next release, with label color #dddddd (https://github.com/elastic/elasticsearch/labels)
+ * publish the maven artifacts on Sonatype: https://oss.sonatype.org/index.html
+ - here is a guide: http://central.sonatype.org/pages/releasing-the-deployment.html
+ * check if the release is there https://oss.sonatype.org/content/repositories/releases/org/elasticsearch/elasticsearch/%(version)s
+ * announce the release on the website / blog post
+ * tweet about the release
+ * announce the release in the google group/mailinglist
+ * Move to a Snapshot version to the current branch for the next point release%(cherry_pick)s
+ """
+ print(pending_msg % { 'version' : release_version, 'cherry_pick' : cherry_pick_command} )
+ success = True
finally:
- if not success:
- print('Logs:')
- with open(LOG, 'r') as log_file:
- print(log_file.read())
- git_checkout(src_branch)
- run('git reset --hard %s' % version_hash)
- try:
- run('git tag -d v%s' % release_version)
- except RuntimeError:
- pass
- elif dry_run:
- print('End of dry_run')
- input('Press Enter to reset changes...')
- git_checkout(src_branch)
- run('git reset --hard %s' % version_hash)
- run('git tag -d v%s' % release_version)
-
- # we delete this one anyways
- run('git branch -D %s' % (release_branch(src_branch, release_version)))
-
- # Checkout the branch we started from
- git_checkout(src_branch)
+ if not success:
+ run('git reset --hard HEAD')
+ run('git checkout %s' % src_branch)
+ elif dry_run:
+ run('git reset --hard %s' % head_hash)
+ run('git tag -d v%s' % release_version)
+ # we delete this one anyways
+ run('git branch -D %s' % (release_branch(release_version)))
+ else:
+ print("Skipping build - smoketest only against version %s" % smoke_test_version)
+ run_mvn('clean') # clean the env!
+
+ if smoke_test_version:
+ fetch(remote)
+ download_and_verify(smoke_test_version, artifact_names(smoke_test_version), plugins=PLUGINS)