2014-01-06 16:48:02 -05:00
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
2013-11-02 04:22:05 -04:00
#
# http://www.apache.org/licenses/LICENSE-2.0
#
2014-01-06 16:48:02 -05:00
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
2013-11-02 04:22:05 -04:00
import re
import tempfile
import shutil
import os
import datetime
import json
import time
import sys
import argparse
import hmac
import urllib
2013-11-04 15:11:33 -05:00
import fnmatch
2013-11-05 08:46:06 -05:00
import socket
2013-12-23 04:19:48 -05:00
import urllib . request
2014-10-02 06:31:45 -04:00
import subprocess
2013-12-23 04:19:48 -05:00
2013-11-02 04:22:05 -04:00
from http . client import HTTPConnection
2014-04-14 12:24:42 -04:00
from http . client import HTTPSConnection
2013-11-02 04:22:05 -04:00
2013-11-04 09:59:14 -05:00
"""
This tool builds a release from a given elasticsearch branch.
In order to execute it go in the top level directory and run :
$ python3 dev_tools / build_release . py - - branch 0.90 - - publish - - remote origin
By default this script runs in ' dry ' mode which essentially simulates a release . If the
' --publish ' option is set the actual release is done . The script takes over almost all
steps necessary for a release from a high level point of view it does the following things :
2014-02-26 14:33:27 -05:00
- run prerequisite checks, i.e. check for Java 1.7 being present or S3 credentials available as env variables
2013-11-04 09:59:14 -05:00
- detect the version to release from the specified branch ( - - branch ) or the current branch
- creates a release branch & updates pom . xml and Version . java to point to a release version rather than a snapshot
- builds the artifacts and runs smoke - tests on the build zip & tar . gz files
- commits the new version and merges the release branch into the source branch
- creates a tag and pushes the commit to the specified origin ( - - remote )
2014-09-29 04:09:13 -04:00
- publishes the releases to Sonatype and S3
2013-11-04 09:59:14 -05:00
Once it ' s done it will print all the remaining steps.
2013-11-05 17:41:18 -05:00
Prerequisites :
- Python 3 k for script execution
- Boto for S3 Upload ( $ apt - get install python - boto )
- RPM for RPM building ( $ apt - get install rpm )
- S3 keys exported via ENV Variables ( AWS_ACCESS_KEY_ID , AWS_SECRET_ACCESS_KEY )
2013-11-04 09:59:14 -05:00
"""
2013-12-02 10:16:17 -05:00
# Shortcut to the process environment; S3 credentials, JAVA_HOME and the
# log location are all read from here.
env = os.environ

# Plugins installed into each package during the smoke test:
# (plugin name, install argument) tuples.
PLUGINS = [('license', 'elasticsearch/license/latest'),
           ('marvel', 'elasticsearch/marvel/latest'),
           ('bigdesk', 'lukas-vlcek/bigdesk'),
           ('paramedic', 'karmi/elasticsearch-paramedic'),
           ('segmentspy', 'polyfractal/elasticsearch-segmentspy'),
           ('inquisitor', 'polyfractal/elasticsearch-inquisitor'),
           ('head', 'mobz/elasticsearch-head')]

# Every command this tool runs appends its output to this log file.
LOG = env.get('ES_RELEASE_LOG', '/tmp/elasticsearch_release.log')
2013-11-02 04:22:05 -04:00
def log(msg):
    """Append *msg* to the release log on its own line."""
    prefixed = '\n%s' % msg
    log_plain(prefixed)
def log_plain(msg):
    """Append *msg* verbatim (UTF-8 encoded) to the release log file.

    Fix: use a context manager so the handle is closed even if write() fails
    (the original opened/closed the file manually).
    """
    with open(LOG, mode='ab') as f:
        f.write(msg.encode('utf-8'))
def run(command, quiet=False):
    """Run *command* through the shell, appending its output to LOG.

    Raises RuntimeError when the command exits non-zero. Note that *quiet*
    only suppresses the console message -- the exception is still raised.
    """
    log('%s: RUN: %s\n' % (datetime.datetime.now(), command))
    # os.system returns a non-zero status on failure; stdout+stderr go to LOG
    if os.system('%s >> %s 2>&1' % (command, LOG)):
        msg = 'FAILED: %s [see log %s]' % (command, LOG)
        if not quiet:
            print(msg)
        raise RuntimeError(msg)
# JAVA_HOME is mandatory -- fail fast with a helpful message if it is unset.
try:
    JAVA_HOME = env['JAVA_HOME']
except KeyError:
    raise RuntimeError("""
  Please set JAVA_HOME in the env before running release tool
  On OSX use: export JAVA_HOME=`/usr/libexec/java_home -v '1.7*'`""")

# Prefer an explicit JAVA7_HOME when present.
try:
    JAVA_HOME = env['JAVA7_HOME']
except KeyError:
    pass  # no JAVA7_HOME - we rely on JAVA_HOME

try:
    # make sure mvn3 is used if mvn3 is available
    # some systems use maven 2 as default
    subprocess.check_output('mvn3 --version', shell=True, stderr=subprocess.STDOUT)
    MVN = 'mvn3'
except subprocess.CalledProcessError:
    MVN = 'mvn'
2013-11-02 04:22:05 -04:00
def java_exe():
    """Shell snippet exporting JAVA_HOME, PATH and JAVACMD for the chosen JDK."""
    home = JAVA_HOME
    return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (home, home, home)
def verify_java_version(version):
    """Raise RuntimeError unless `java -version` reports the given major *version*."""
    output = os.popen('%s; java -version 2>&1' % java_exe()).read()
    expected = 'version "%s.' % version
    if expected not in output:
        raise RuntimeError('got wrong version for java %s:\n%s' % (version, output))
2014-02-26 14:33:27 -05:00
# Verifies the Java version maven runs on. We guarantee we build with Java 1.7;
# if 1.7 is not available the build must fail.
def verify_mvn_java_version(version, mvn):
    """Raise RuntimeError unless *mvn* --version reports Java *version*."""
    output = os.popen('%s; %s --version 2>&1' % (java_exe(), mvn)).read()
    expected = 'Java version: %s' % version
    if expected not in output:
        raise RuntimeError('got wrong java version for %s %s:\n%s' % (mvn, version, output))
2013-11-04 15:11:33 -05:00
# Returns the hash of the current git HEAD revision
def get_head_hash():
    """Hash of the commit HEAD points at, as a stripped string."""
    output = os.popen('git rev-parse --verify HEAD 2>&1').read()
    return output.strip()
2013-12-23 04:19:48 -05:00
2014-01-14 10:11:45 -05:00
# Returns the hash of the given tag revision
def get_tag_hash(tag):
    """Hash of the commit the given *tag* points at, as a stripped string."""
    output = os.popen('git show-ref --tags %s --hash 2>&1' % (tag)).read()
    return output.strip()
2013-11-02 04:22:05 -04:00
2013-11-04 15:11:33 -05:00
# Returns the name of the current branch
def get_current_branch():
    """Name of the branch the working copy currently has checked out."""
    output = os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read()
    return output.strip()
2014-02-26 14:33:27 -05:00
verify_java_version ( ' 1.7 ' ) # we require to build with 1.7
verify_mvn_java_version ( ' 1.7 ' , MVN )
2013-11-02 04:22:05 -04:00
2013-11-04 15:11:33 -05:00
# Utility that returns the name of the release branch for a given version
def release_branch(version):
    """Git branch name used while preparing the given release *version*."""
    return 'release_branch_%s' % version
2013-11-02 04:22:05 -04:00
2014-01-14 10:11:45 -05:00
# Runs git fetch on the given remote
def fetch(remote):
    """Fetch the latest refs from *remote*."""
    run('git fetch %s' % remote)
2013-11-04 15:11:33 -05:00
# Creates a new release branch from the given source branch and rebases the
# source branch from the remote before creating the release branch.
# Note: this fails if the source branch doesn't exist on the provided remote.
def create_release_branch(remote, src_branch, release):
    """Check out *src_branch*, rebase it on *remote*, and branch off the release branch."""
    commands = (
        'git checkout %s' % src_branch,
        'git pull --rebase %s %s' % (remote, src_branch),
        'git checkout -b %s' % (release_branch(release)),
    )
    for command in commands:
        run(command)
2013-11-04 15:11:33 -05:00
# Reads the given file and applies the callback to every line. If the callback
# changed any line the file is replaced with the modified content.
def process_file(file_path, line_callback):
    """Apply *line_callback* to each line of *file_path*.

    Returns True and replaces the file when at least one line changed;
    returns False and leaves the file untouched otherwise.
    """
    fh, tmp_path = tempfile.mkstemp()
    changed = False
    with open(tmp_path, 'w', encoding='utf-8') as out:
        with open(file_path, encoding='utf-8') as src:
            for line in src:
                replaced = line_callback(line)
                if replaced != line:
                    changed = True
                out.write(replaced)
    os.close(fh)
    if not changed:
        # nothing to do - just drop the temp copy
        os.remove(tmp_path)
        return False
    # swap the original for the transformed copy
    os.remove(file_path)
    shutil.move(tmp_path, file_path)
    return True
# Walks the given directory path (defaults to 'docs') and replaces all
# 'coming[$version]' tags with 'added[$version]'. Only touches asciidoc files.
def update_reference_docs(release_version, path='docs'):
    """Rewrite coming[] tags to added[] for *release_version*; return changed files."""
    pattern = 'coming[%s' % (release_version)
    replacement = 'added[%s' % (release_version)
    touched = []

    def swap(line):
        return line.replace(pattern, replacement)

    for root, _, file_names in os.walk(path):
        for file_name in fnmatch.filter(file_names, '*.asciidoc'):
            candidate = os.path.join(root, file_name)
            if process_file(candidate, swap):
                touched.append(candidate)
    return touched
# Moves the pom.xml file from a snapshot to a release version
def remove_maven_snapshot(pom, release):
    """Strip the -SNAPSHOT suffix from the *release* version tag in *pom*."""
    snapshot = '<version>%s-SNAPSHOT</version>' % (release)
    final = '<version>%s</version>' % (release)
    process_file(pom, lambda line: line.replace(snapshot, final))
2013-11-04 15:11:33 -05:00
# Moves the Version.java file from a snapshot to a release version
def remove_version_snapshot(version_file, release):
    """Flip the snapshot flag of the release constant in Version.java."""
    # 1.0.0.Beta1 -> 1_0_0_Beta1 (dots become underscores in the constant name)
    ident = release.replace('.', '_')
    snapshot = 'new Version(V_%s_ID, true' % (ident)
    final = 'new Version(V_%s_ID, false' % (ident)
    process_file(version_file, lambda line: line.replace(snapshot, final))
2013-11-04 15:11:33 -05:00
# Stages the given files for the next git commit
def add_pending_files(*files):
    """git-add each given path so it is part of the next commit."""
    for pending in files:
        run('git add %s' % (pending))
2013-11-04 15:11:33 -05:00
# Executes a git commit with 'release [version]' as the commit message
def commit_release(release):
    """Commit the staged release changes."""
    run('git commit -m "release [%s]"' % release)
2014-03-25 03:44:01 -04:00
def commit_feature_flags(release):
    """Commit the documentation feature-flag updates for *release*."""
    run('git commit -m "Update Documentation Feature Flags [%s]"' % release)
2013-11-02 04:22:05 -04:00
def tag_release(release):
    """Create an annotated tag v<release> for the release commit."""
    run('git tag -a v%s -m "Tag release version %s"' % (release, release))
def run_mvn(*cmd):
    """Run each maven command in sequence with the JDK environment exported."""
    java_env = java_exe()
    for mvn_args in cmd:
        run('%s; %s %s' % (java_env, MVN, mvn_args))
2013-11-04 15:11:33 -05:00
2014-07-22 04:52:08 -04:00
def build_release(run_tests=False, dry_run=True, cpus=1, bwc_version=None):
    """Build the release artifacts with maven.

    run_tests   -- also run the local/network test suites first
    dry_run     -- 'package' only instead of 'deploy'
    cpus        -- number of test JVMs to use
    bwc_version -- if set, run backwards-compatibility tests against it

    Fix: corrected typos in the RPM failure message ("Bulding" -> "Building",
    "Use on of" -> "Use one of").
    """
    target = 'deploy'
    if dry_run:
        target = 'package'
    if run_tests:
        run_mvn('clean',
                'test -Dtests.jvms=%s -Des.node.mode=local' % (cpus),
                'test -Dtests.jvms=%s -Des.node.mode=network' % (cpus))
    if bwc_version:
        print('Running Backwards compatibility tests against version [%s]' % (bwc_version))
        run_mvn('clean', 'test -Dtests.filter=@backwards -Dtests.bwc.version=%s -Dtests.bwc=true -Dtests.jvms=1' % bwc_version)
    # refuse to release with @AwaitsFix tests still in the tree
    run_mvn('clean test-compile -Dforbidden.test.signatures="org.apache.lucene.util.LuceneTestCase\$AwaitsFix @ Please fix all bugs before release"')
    run_mvn('clean %s -DskipTests' % (target))
    success = False
    try:
        run_mvn('-DskipTests rpm:rpm')
        success = True
    finally:
        if not success:
            print("""
    RPM Building failed make sure "rpm" tools are installed.
    Use one of the following commands to install:
    $ brew install rpm # on OSX
    $ apt-get install rpm # on Ubuntu et.al
  """)
2014-04-14 12:24:42 -04:00
# Uses the github API to fetch open tickets for the given release version.
# If it finds any open tickets for that version it throws an exception.
def ensure_no_open_tickets(version):
    """Raise RuntimeError if Github lists open issues labeled v<version>.

    Network (socket) errors are only logged, not raised, since the label may
    simply not exist yet.

    Fix: the except-branch log message had a stray "' % version" fragment
    pasted inside the string literal; the format string is now clean.
    """
    version = "v%s" % version
    conn = HTTPSConnection('api.github.com')
    try:
        log('Checking for open tickets on Github for version %s' % version)
        log('Check if node is available')
        conn.request('GET', '/repos/elasticsearch/elasticsearch/issues?state=open&labels=%s' % version,
                     headers={'User-Agent': 'Elasticsearch version checker'})
        res = conn.getresponse()
        if res.status == 200:
            issues = json.loads(res.read().decode("utf-8"))
            if issues:
                urls = []
                for issue in issues:
                    urls.append(issue['html_url'])
                raise RuntimeError('Found open issues for release version %s:\n%s' % (version, '\n'.join(urls)))
            else:
                log("No open issues found for version %s" % version)
        else:
            raise RuntimeError('Failed to fetch issue list from Github for release version %s' % version)
    except socket.error as e:
        log("Failed to fetch issue list from Github for release version %s - Exception: [%s]" % (version, e))
        # that is ok it might not be there yet
    finally:
        conn.close()
2013-11-02 04:22:05 -04:00
def wait_for_node_startup(host='127.0.0.1', port=9200, timeout=15):
    """Poll http://host:port once per second until it answers HTTP 200.

    Returns True as soon as the node responds, False after *timeout* attempts.
    """
    for _ in range(timeout):
        conn = HTTPConnection(host, port, timeout)
        try:
            log('Waiting until node becomes available for 1 second')
            time.sleep(1)
            log('Check if node is available')
            conn.request('GET', '')
            res = conn.getresponse()
            if res.status == 200:
                return True
        except socket.error as e:
            log("Failed while waiting for node - Exception: [%s]" % e)
            # that is ok it might not be there yet
        finally:
            conn.close()
    return False
2014-09-24 13:48:49 -04:00
# Ensures we are using a true Lucene release, not a snapshot build:
def verify_lucene_version():
    """Raise RuntimeError if pom.xml still references a Lucene snapshot.

    Checks for the snapshot repository URL and that <lucene.version> and
    <lucene.maven.version> agree.

    Fix: the pom is now read via a context manager so the file handle is not
    leaked (the original called open().read() without closing).
    """
    with open('pom.xml', encoding='utf-8') as pom:
        s = pom.read()
    if 'download.elasticsearch.org/lucenesnapshots' in s:
        raise RuntimeError('pom.xml contains download.elasticsearch.org/lucenesnapshots repository: remove that before releasing')
    m = re.search(r'<lucene.version>(.*?)</lucene.version>', s)
    if m is None:
        raise RuntimeError('unable to locate lucene.version in pom.xml')
    lucene_version = m.group(1)
    m = re.search(r'<lucene.maven.version>(.*?)</lucene.maven.version>', s)
    if m is None:
        raise RuntimeError('unable to locate lucene.maven.version in pom.xml')
    lucene_maven_version = m.group(1)
    if lucene_version != lucene_maven_version:
        raise RuntimeError('pom.xml is still using a snapshot release of lucene (%s): cutover to a real lucene release before releasing' % lucene_maven_version)
2013-11-04 15:11:33 -05:00
# Checks the pom.xml for the release version: fails if the pom has no
# SNAPSHOT version set, i.e. if the version is already a release version.
def find_release_version(src_branch):
    """Check out *src_branch* and return the version found in pom.xml (e.g. 0.90.7)."""
    run('git checkout %s' % src_branch)
    snapshot_re = re.compile(r'<version>(.+)-SNAPSHOT</version>')
    with open('pom.xml', encoding='utf-8') as pom:
        for line in pom:
            match = snapshot_re.search(line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch %s' % src_branch)
2013-11-02 04:22:05 -04:00
2014-06-03 10:23:06 -04:00
def artifact_names(release, path=''):
    """Paths of the deb, tar.gz and zip artifacts for *release* under *path*."""
    suffixes = ['deb', 'tar.gz', 'zip']
    return [os.path.join(path, 'elasticsearch-%s.%s' % (release, suffix))
            for suffix in suffixes]
2013-12-23 04:19:48 -05:00
2013-11-05 08:46:06 -05:00
def get_artifacts(release):
    """Collect the deb/tar.gz/zip/rpm artifacts for *release* and return their paths.

    Raises RuntimeError when any expected artifact is missing.
    """
    common_artifacts = artifact_names(release, 'target/releases/')
    for artifact in common_artifacts:
        if not os.path.isfile(artifact):
            raise RuntimeError('Could not find required artifact at %s' % artifact)
    rpm_dir = 'target/rpm/elasticsearch/RPMS/noarch/'
    rpm = os.path.join(rpm_dir, 'elasticsearch-%s-1.noarch.rpm' % release)
    if not os.path.isfile(rpm):
        raise RuntimeError('Could not find required artifact at %s' % rpm)
    log('RPM [%s] contains: ' % rpm)
    run('rpm -pqli %s' % rpm)
    # RPM insists on attaching a "-1" release number, so rename it to the
    # common naming scheme
    renamed_rpm = os.path.join(rpm_dir, 'elasticsearch-%s.noarch.rpm' % release)
    shutil.move(rpm, renamed_rpm)
    common_artifacts.append(renamed_rpm)
    return common_artifacts
2013-11-02 04:22:05 -04:00
2015-02-19 04:15:51 -05:00
# Checks the jar files in each package and barfs if any of the package
# jar listings differ.
def check_artifacts_for_same_jars(artifacts):
    """Raise RuntimeError unless every package ships the identical set of jars."""
    listers = (
        ('.zip', "unzip -l %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort"),
        ('.tar.gz', "tar tzvf %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort"),
        ('.rpm', "rpm -pqli %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort"),
        ('.deb', "dpkg -c %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort"),
    )
    jars = []
    for artifact in artifacts:
        for suffix, lister in listers:
            if artifact.endswith(suffix):
                jars.append(subprocess.check_output(lister % artifact, shell=True))
    if len(set(jars)) != 1:
        raise RuntimeError('JAR contents of packages are not the same, please check the package contents. Use [unzip -l], [tar tzvf], [dpkg -c], [rpm -pqli] to inspect')
2013-11-04 15:11:33 -05:00
# Generates sha1 checksums for all files and returns the checksum files as
# well as the given files in a single list.
def generate_checksums(files):
    """Write a <name>.sha1.txt next to each file; return [checksum, file, ...]."""
    result = []
    for release_file in files:
        directory, name = os.path.split(release_file)
        checksum_file = '%s.sha1.txt' % name
        if os.system('cd %s; shasum %s > %s' % (directory, name, checksum_file)):
            raise RuntimeError('Failed to generate checksum for file %s' % release_file)
        result.extend([os.path.join(directory, checksum_file), release_file])
    return result
2014-01-14 10:11:45 -05:00
def download_and_verify(release, files, plugins=None, base_url='https://download.elasticsearch.org/elasticsearch/elasticsearch'):
    """Download the published release *files* from *base_url*, verify their
    sha1 checksums and smoke-test the downloaded packages.

    Everything is downloaded into a temp dir that is removed in all cases.
    """
    print('Downloading and verifying release %s from %s' % (release, base_url))
    tmp_dir = tempfile.mkdtemp()
    try:
        downloaded_files = []
        for file in files:
            name = os.path.basename(file)
            url = '%s/%s' % (base_url, name)
            abs_file_path = os.path.join(tmp_dir, name)
            print('Downloading %s' % (url))
            downloaded_files.append(abs_file_path)
            urllib.request.urlretrieve(url, abs_file_path)
            url = ''.join([url, '.sha1.txt'])
            # NOTE(review): abs_file_path is already absolute, so this join
            # resolves to abs_file_path + '.sha1.txt' — confirm intended.
            checksum_file = os.path.join(tmp_dir, ''.join([abs_file_path, '.sha1.txt']))
            urllib.request.urlretrieve(url, checksum_file)
            print('Verifying checksum %s' % (checksum_file))
            run('cd %s && sha1sum -c %s' % (tmp_dir, os.path.basename(checksum_file)))
        # smoke test against the hash of the release tag
        smoke_test_release(release, downloaded_files, get_tag_hash('v%s' % release), plugins)
        print('SUCCESS')
    finally:
        shutil.rmtree(tmp_dir)
2014-01-14 10:11:45 -05:00
def smoke_test_release(release, files, expected_hash, plugins):
    """Smoke-test each release package in *files*.

    For every tar.gz/zip package: unpack into a temp dir, install *plugins*,
    start the node, then verify via the HTTP API that the version number,
    non-snapshot flag, build hash and the installed plugin set all match
    expectations, and run the REST spec tests against the node. Other
    package types (deb/rpm) are skipped. The node is shut down and the temp
    dir removed in all cases.
    """
    for release_file in files:
        if not os.path.isfile(release_file):
            raise RuntimeError('Smoketest failed missing file %s' % (release_file))
        tmp_dir = tempfile.mkdtemp()
        if release_file.endswith('tar.gz'):
            run('tar -xzf %s -C %s' % (release_file, tmp_dir))
        elif release_file.endswith('zip'):
            run('unzip %s -d %s' % (release_file, tmp_dir))
        else:
            log('Skip SmokeTest for [%s]' % release_file)
            continue  # nothing to do here
        es_run_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/elasticsearch')
        print('Smoke testing package [%s]' % release_file)
        es_plugin_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/plugin')
        # remember which plugins we installed so we can verify them via _nodes
        plugin_names = {}
        for name, plugin in plugins:
            print('Install plugin [%s] from [%s]' % (name, plugin))
            run('%s; %s %s %s' % (java_exe(), es_plugin_path, '-install', plugin))
            plugin_names[name] = True
        if release.startswith("0.90."):
            background = ''  # 0.90.x starts in background automatically
        else:
            background = '-d'
        print('Starting elasticsearch deamon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release))
        run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.discovery.zen.ping.multicast.enabled=false -Des.node.bench=true -Des.script.disable_dynamic=false %s'
            % (java_exe(), es_run_path, background))
        conn = HTTPConnection('127.0.0.1', 9200, 20)
        wait_for_node_startup()
        try:
            try:
                conn.request('GET', '')
                res = conn.getresponse()
                if res.status == 200:
                    version = json.loads(res.read().decode("utf-8"))['version']
                    if release != version['number']:
                        raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
                    if version['build_snapshot']:
                        raise RuntimeError('Expected non snapshot version')
                    if version['build_hash'].strip() != expected_hash:
                        raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash']))
                    print('Running REST Spec tests against package [%s]' % release_file)
                    run_mvn('test -Dtests.cluster=%s -Dtests.class=*.*RestTests' % ("127.0.0.1:9300"))
                    print('Verify if plugins are listed in _nodes')
                    conn.request('GET', '/_nodes?plugin=true&pretty=true')
                    res = conn.getresponse()
                    if res.status == 200:
                        nodes = json.loads(res.read().decode("utf-8"))['nodes']
                        for _, node in nodes.items():
                            node_plugins = node['plugins']
                            for node_plugin in node_plugins:
                                # every reported plugin must be one we installed...
                                if not plugin_names.get(node_plugin['name'], False):
                                    raise RuntimeError('Unexpeced plugin %s' % node_plugin['name'])
                                del plugin_names[node_plugin['name']]
                        # ...and every installed plugin must have been reported
                        if plugin_names:
                            raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys()))
                    else:
                        raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
                else:
                    raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
            finally:
                # always try to shut the test node down again
                conn.request('POST', '/_cluster/nodes/_local/_shutdown')
                time.sleep(1)  # give the node some time to shut down
                if conn.getresponse().status != 200:
                    raise RuntimeError('Expected HTTP 200 but got %s on node shutdown' % res.status)
        finally:
            conn.close()
            shutil.rmtree(tmp_dir)
def merge_tag_push(remote, src_branch, release_version, dry_run):
    """Merge the release branch into *src_branch*, tag it, and (unless
    *dry_run*) push both the commit and the tag to *remote*."""
    run('git checkout %s' % src_branch)
    run('git merge %s' % release_branch(release_version))
    run('git tag v%s' % release_version)
    if dry_run:
        print('dryrun [True] -- skipping push to remote %s' % remote)
    else:
        run('git push %s %s' % (remote, src_branch))  # push the commit
        run('git push %s v%s' % (remote, release_version))  # push the tag
def publish_artifacts(artifacts, base='elasticsearch/elasticsearch', dry_run=True):
    """Upload each artifact to Amazon S3 (skipped entirely on *dry_run*)."""
    location = os.path.dirname(os.path.realpath(__file__))
    for artifact in artifacts:
        if dry_run:
            print('Skip Uploading %s to Amazon S3' % artifact)
            continue
        print('Uploading %s to Amazon S3' % artifact)
        # requires boto to be installed but it is not available on python3k
        # yet so we shell out to a dedicated tool
        run('python %s/upload-s3.py --file %s' % (location, os.path.abspath(artifact)))
2014-09-29 04:09:13 -04:00
def print_sonatype_notice():
    """Print a reminder to configure sonatype credentials, unless
    ~/.m2/settings.xml already contains the sonatype server entry."""
    settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
    if os.path.isfile(settings):
        with open(settings, encoding='utf-8') as settings_file:
            for line in settings_file:
                if line.strip() == '<id>sonatype-nexus-snapshots</id>':
                    # moving out - we found the indicator no need to print the warning
                    return
    print("""
    NOTE: No sonatype settings detected, make sure you have configured
    your sonatype credentials in '~/.m2/settings.xml':

    <settings>
    ...
    <servers>
      <server>
        <id>sonatype-nexus-snapshots</id>
        <username>your-jira-id</username>
        <password>your-jira-pwd</password>
      </server>
      <server>
        <id>sonatype-nexus-staging</id>
        <username>your-jira-id</username>
        <password>your-jira-pwd</password>
      </server>
    </servers>
    ...
    </settings>
    """)
def check_s3_credentials():
    """Raise RuntimeError unless both AWS credential env variables are set."""
    access_key = env.get('AWS_ACCESS_KEY_ID', None)
    secret_key = env.get('AWS_SECRET_ACCESS_KEY', None)
    if not access_key or not secret_key:
        raise RuntimeError('Could not find "AWS_ACCESS_KEY_ID" / "AWS_SECRET_ACCESS_KEY" in the env variables please export in order to upload to S3')
2014-07-22 04:52:08 -04:00
# Files that carry the version to be released.
VERSION_FILE = 'src/main/java/org/elasticsearch/Version.java'
POM_FILE = 'pom.xml'

# we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml
print_sonatype_notice()
2013-11-04 09:59:14 -05:00
2014-07-22 04:52:08 -04:00
# Finds the highest available bwc version to test against
def find_bwc_version(release_version, bwc_dir='backwards'):
    """Return the highest elasticsearch version below *release_version*
    checked out under *bwc_dir*, or None when no candidate exists."""
    log('Lookup bwc version in directory [%s]' % bwc_dir)
    bwc_version = None
    if os.path.exists(bwc_dir) and os.path.isdir(bwc_dir):
        max_version = [int(x) for x in release_version.split('.')]
        prefix = 'elasticsearch-'
        for entry in os.listdir(bwc_dir):
            if not (entry.startswith(prefix) and os.path.isdir(os.path.join(bwc_dir, entry))):
                continue
            candidate = entry[len(prefix):]
            version = [int(x) for x in candidate.split('.')]
            # bwc tests only run against strictly smaller versions
            if version < max_version:
                if (not bwc_version) or version > [int(x) for x in bwc_version.split('.')]:
                    bwc_version = candidate
        log('Using bwc version [%s]' % bwc_version)
    else:
        log('bwc directory [%s] does not exists or is not a directory - skipping' % bwc_dir)
    return bwc_version
2014-10-02 06:31:45 -04:00
def ensure_checkout_is_clean(branchName):
  """Verify the working copy is a pristine, up-to-date checkout of branchName.

  Raises RuntimeError when there are local modifications, untracked files,
  the wrong branch is checked out, or the branch is behind/ahead of origin.
  NOTE(review): the branch/ahead/behind checks parse human-readable
  'git status' output, which is inherently porcelain-version dependent.
  """
  # Make sure no local mods:
  # decode so a failure message shows readable text instead of a bytes repr
  s = subprocess.check_output(['git', 'diff', '--shortstat']).decode('utf-8', errors='replace')
  if len(s) > 0:
    raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)
  # Make sure no untracked files:
  s = subprocess.check_output(['git', 'status']).decode('utf-8', errors='replace')
  if 'Untracked files:' in s:
    raise RuntimeError('git status shows untracked files: got:\n%s' % s)
  # Make sure we are on the right branch (NOTE: a bit weak, since we default to current branch):
  if 'On branch %s' % branchName not in s:
    raise RuntimeError('git status does not show branch %s: got:\n%s' % (branchName, s))
  # Make sure we have all changes from origin:
  if 'is behind' in s:
    raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin %s": got:\n%s' % (branchName, s))
  # Make sure we no local unpushed changes (this is supposed to be a clean area):
  if 'is ahead' in s:
    raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout %s", "git reset --hard origin/%s": got:\n%s' % (branchName, branchName, s))
if __name__ == '__main__':
  # Command-line front end: parses options, then either performs a full release
  # build (default) or only smoke-tests an already published version (--smoke).
  parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release')
  parser.add_argument('--branch', '-b', metavar='RELEASE_BRANCH', default=get_current_branch(),
                      help='The branch to release from. Defaults to the current branch.')
  parser.add_argument('--cpus', '-c', metavar='1', default=1,
                      help='The number of cpus to use for running the test. Default is [1]')
  parser.add_argument('--skiptests', '-t', dest='tests', action='store_false',
                      help='Skips tests before release. Tests are run by default.')
  parser.set_defaults(tests=True)
  parser.add_argument('--remote', '-r', metavar='origin', default='origin',
                      help='The remote to push the release commit and tag to. Default is [origin]')
  parser.add_argument('--publish', '-d', dest='dryrun', action='store_false',
                      help='Publishes the release. Disable by default.')
  parser.add_argument('--smoke', '-s', dest='smoke', default='',
                      help='Smoke tests the given release')
  parser.add_argument('--bwc', '-w', dest='bwc', metavar='backwards', default='backwards',
                      help='Backwards compatibility version path to use to run compatibility tests against')

  # dryrun defaults to True: without --publish nothing is pushed or uploaded.
  parser.set_defaults(dryrun=True)
  parser.set_defaults(smoke=None)
  args = parser.parse_args()

  bwc_path = args.bwc
  src_branch = args.branch
  remote = args.remote
  run_tests = args.tests
  dry_run = args.dryrun
  cpus = args.cpus
  # --smoke given => skip building and only verify that released version
  build = not args.smoke
  smoke_test_version = args.smoke

  # Refuse to run over a stale log from a previous release attempt.
  if os.path.exists(LOG):
    raise RuntimeError('please remove old release log %s first' % LOG)

  if not dry_run:
    # A real release needs S3 credentials and an explicit human confirmation.
    check_s3_credentials()
    print('WARNING: dryrun is set to "false" - this will push and publish the release')
    input('Press Enter to continue...')

  print(''.join(['-' for _ in range(80)]))
  print('Preparing Release from branch [%s] running tests: [%s] dryrun: [%s]' % (src_branch, run_tests, dry_run))
  print('  JAVA_HOME is [%s]' % JAVA_HOME)
  print('  Running with maven command: [%s] ' % (MVN))

  if build:
    # Pre-flight checks: clean git checkout, expected Lucene version,
    # release version derivable from the branch, no open blocker tickets.
    ensure_checkout_is_clean(src_branch)
    verify_lucene_version()
    release_version = find_release_version(src_branch)
    ensure_no_open_tickets(release_version)
    if not dry_run:
      # when actually publishing, smoke-test the version we just built
      smoke_test_version = release_version

    # Remember HEAD so a dry run can be rolled back to it in the finally block.
    head_hash = get_head_hash()
    run_mvn('clean')  # clean the env!
    print('Release version: [%s]' % release_version)
    create_release_branch(remote, src_branch, release_version)
    print('Created release branch [%s]' % (release_branch(release_version)))
    success = False
    try:
      # Strip the -SNAPSHOT suffix from pom.xml and Version.java and commit.
      pending_files = [POM_FILE, VERSION_FILE]
      remove_maven_snapshot(POM_FILE, release_version)
      remove_version_snapshot(VERSION_FILE, release_version)
      print('Done removing snapshot version')
      add_pending_files(*pending_files)  # expects var args use * to expand
      commit_release(release_version)
      pending_files = update_reference_docs(release_version)
      version_head_hash = None
      # split commits for docs and version to enable easy cherry-picking
      if pending_files:
        add_pending_files(*pending_files)  # expects var args use * to expand
        commit_feature_flags(release_version)
        version_head_hash = get_head_hash()
      print('Committed release version [%s]' % release_version)
      print(''.join(['-' for _ in range(80)]))
      print('Building Release candidate')
      input('Press Enter to continue...')
      if not dry_run:
        print('Running maven builds now and publish to Sonatype - run-tests [%s]' % run_tests)
      else:
        print('Running maven builds now run-tests [%s]' % run_tests)
      # Build (optionally running tests against the best available bwc checkout).
      build_release(run_tests=run_tests, dry_run=dry_run, cpus=cpus, bwc_version=find_bwc_version(release_version, bwc_path))
      artifacts = get_artifacts(release_version)
      print('Checking if all artifacts contain the same jars')
      check_artifacts_for_same_jars(artifacts)
      artifacts_and_checksum = generate_checksums(artifacts)
      # Smoke-test the freshly built artifacts before anything is pushed.
      smoke_test_release(release_version, artifacts, get_head_hash(), PLUGINS)
      print(''.join(['-' for _ in range(80)]))
      print('Finish Release -- dry_run: %s' % dry_run)
      input('Press Enter to continue...')
      print('merge release branch, tag and push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run))
      merge_tag_push(remote, src_branch, release_version, dry_run)
      print('publish artifacts to S3 -- dry_run: %s' % dry_run)
      publish_artifacts(artifacts_and_checksum, dry_run=dry_run)
      # Mention the docs cherry-pick only if a separate docs commit was made.
      cherry_pick_command = '.'
      if version_head_hash:
        cherry_pick_command = ' and cherry-pick the documentation changes: \'git cherry-pick %s\' to the development branch' % (version_head_hash)
      pending_msg = """
Release successful pending steps:
    * create a new vX.Y.Z label on github for the next release, with label color #dddddd (https://github.com/elasticsearch/elasticsearch/labels)
    * publish the maven artifacts on Sonatype: https://oss.sonatype.org/index.html
       - here is a guide: http://central.sonatype.org/pages/releasing-the-deployment.html
    * check if the release is there https://oss.sonatype.org/content/repositories/releases/org/elasticsearch/elasticsearch/%(version)s
    * announce the release on the website / blog post
    * tweet about the release
    * announce the release in the google group/mailinglist
    * Move to a Snapshot version to the current branch for the next point release%(cherry_pick)s
"""
      print(pending_msg % {'version': release_version, 'cherry_pick': cherry_pick_command})
      success = True
    finally:
      # Rollback: on failure restore the source branch; on a successful dry
      # run undo the release commit and tag. The release branch is always
      # deleted since its changes were either merged or abandoned.
      if not success:
        run('git reset --hard HEAD')
        run('git checkout %s' % src_branch)
      elif dry_run:
        run('git reset --hard %s' % head_hash)
        run('git tag -d v%s' % release_version)
      # we delete this one anyways
      run('git branch -D %s' % (release_branch(release_version)))
  else:
    print("Skipping build - smoketest only against version %s" % smoke_test_version)
    run_mvn('clean')  # clean the env!

  # Smoke test path: download the published artifacts and verify them.
  if smoke_test_version:
    fetch(remote)
    download_and_verify(smoke_test_version, artifact_names(smoke_test_version), plugins=PLUGINS)