mirror of https://github.com/apache/lucene.git
LUCENE-3966: smoke tester can test from local (file://) urls, handle url-shortened base URLs, and build/stage/test unsigned artifacts
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1311067 13f79535-47bb-0310-9956-ffa450edef68
parent 6350ed1b8a
commit 879e825083
@@ -19,17 +19,15 @@ import shutil
 import os
 import sys
 
-# Usage: python -u buildRelease.py /path/to/checkout version(eg: 3.4.0) gpgKey(eg: 6E68DA61) rcNum user [-prepare] [-push]
-#
-# EG: python -u buildRelease.py -prepare -push /lucene/34x 3.4.0 6E68DA61 1 mikemccand
+# Usage: python -u buildRelease.py [-sign gpgKey(eg: 6E68DA61)] [-prepare] [-push userName] [-pushLocal dirName] [-smoke tmpDir] /path/to/checkout version(eg: 3.4.0) rcNum(eg: 0)
+#
+# EG: python -u buildRelease.py -prepare -push -sign 6E68DA61 mikemccand /lucene/34x 3.4.0 0
 
 # TODO: also run smokeTestRelease.py?
 
-# NOTE: you have to type in your gpg password at some point while this
-# runs; it's VERY confusing because the output is directed to
-# /tmp/release.log, so, you have to tail that and when GPG wants your
-# password, type it!
+# NOTE: if you specify -sign, you have to type in your gpg password at
+# some point while this runs; it's VERY confusing because the output
+# is directed to /tmp/release.log, so, you have to tail that and when
+# GPG wants your password, type it! Also sometimes you have to type
+# it twice in a row!
 
 LOG = '/tmp/release.log'
 
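For illustration only (not part of the patch): combining the new flags, an unsigned release candidate can be built, staged on local disk, and smoke tested in a single run; the checkout path, staging/tmp dirs, version and RC number below are placeholders.

  python -u buildRelease.py -prepare -pushLocal /tmp/rc-stage -smoke /tmp/rc-smoke /path/to/checkout 3.4.0 0

Omitting -sign makes prepare() call the new prepare-release-no-sign ant target, -pushLocal stages the artifacts under a file:// URL instead of people.apache.org, and -smoke hands that URL straight to smokeTestRelease.smokeTest().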
@@ -70,7 +68,7 @@ def getSVNRev():
   return rev
 
-def prepare(root, version, gpgKeyID):
+def prepare(root, version, gpgKeyID, doTest):
   print
   print 'Prepare release...'
   if os.path.exists(LOG):
@@ -84,8 +82,10 @@ def prepare(root, version, gpgKeyID):
   print ' svn rev: %s' % rev
   log('\nSVN rev: %s\n' % rev)
 
-  print ' ant clean test'
-  run('ant clean test')
+  if doTest:
+    # Don't run tests if we are gonna smoke test after the release...
+    print ' ant clean test'
+    run('ant clean test')
 
   print ' clean checkout'
   scrubCheckout()
@@ -93,10 +93,21 @@ def prepare(root, version, gpgKeyID):
 
   print ' lucene prepare-release'
   os.chdir('lucene')
-  run('ant -Dversion=%s -Dspecversion=%s -Dgpg.key=%s prepare-release' % (version, version, gpgKeyID))
+  cmd = 'ant -Dversion=%s -Dspecversion=%s' % (version, version)
+  if gpgKeyID is not None:
+    cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
+  else:
+    cmd += ' prepare-release-no-sign'
+  run(cmd)
 
   print ' solr prepare-release'
   os.chdir('../solr')
-  run('ant -Dversion=%s -Dspecversion=%s -Dgpg.key=%s prepare-release' % (version, version, gpgKeyID))
+  cmd = 'ant -Dversion=%s -Dspecversion=%s' % (version, version)
+  if gpgKeyID is not None:
+    cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
+  else:
+    cmd += ' prepare-release-no-sign'
+  run(cmd)
   print ' done!'
   print
   return rev
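To make the signed/unsigned split in prepare() concrete, here is a minimal sketch (not part of the patch) of the ant command it now assembles; the version and key ID are placeholders:

  # Mirrors the cmd-building branch added above.
  def antPrepareCmd(version, gpgKeyID):
    cmd = 'ant -Dversion=%s -Dspecversion=%s' % (version, version)
    if gpgKeyID is not None:
      cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
    else:
      cmd += ' prepare-release-no-sign'
    return cmd

  print antPrepareCmd('3.4.0', '6E68DA61')
  # -> ant -Dversion=3.4.0 -Dspecversion=3.4.0 -Dgpg.key=6E68DA61 prepare-release
  print antPrepareCmd('3.4.0', None)
  # -> ant -Dversion=3.4.0 -Dspecversion=3.4.0 prepare-release-no-sign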
@@ -149,30 +160,134 @@ def push(version, root, rev, rcNum, username):
   print ' chmod...'
   run('ssh %s@people.apache.org "chmod -R a+rX-w public_html/staging_area/%s"' % (username, dir))
 
-  print ' done! URL: https://people.apache.org/~%s/staging_area/%s' % (username, dir)
+  print ' done!'
+  url = 'https://people.apache.org/~%s/staging_area/%s' % (username, dir)
+  return url
+
+def pushLocal(version, root, rev, rcNum, localDir):
+  print 'Push local [%s]...' % localDir
+  os.makedirs(localDir)
+
+  dir = 'lucene-solr-%s-RC%d-rev%s' % (version, rcNum, rev)
+  os.makedirs('%s/%s/lucene' % (localDir, dir))
+  os.makedirs('%s/%s/solr' % (localDir, dir))
+  print ' Lucene'
+  os.chdir('%s/lucene/dist' % root)
+  print ' zip...'
+  if os.path.exists('lucene.tar.bz2'):
+    os.remove('lucene.tar.bz2')
+  run('tar cjf lucene.tar.bz2 *')
+
+  os.chdir('%s/%s/lucene' % (localDir, dir))
+  print ' unzip...'
+  run('tar xjf "%s/lucene/dist/lucene.tar.bz2"' % root)
+  os.remove('%s/lucene/dist/lucene.tar.bz2' % root)
+  print ' copy changes...'
+  run('cp -r "%s/lucene/build/docs/changes" changes-%s' % (root, version))
+
+  print ' Solr'
+  os.chdir('%s/solr/package' % root)
+  print ' zip...'
+  if os.path.exists('solr.tar.bz2'):
+    os.remove('solr.tar.bz2')
+  run('tar cjf solr.tar.bz2 *')
+  print ' unzip...'
+  os.chdir('%s/%s/solr' % (localDir, dir))
+  run('tar xjf "%s/solr/package/solr.tar.bz2"' % root)
+  os.remove('%s/solr/package/solr.tar.bz2' % root)
+
+  print ' KEYS'
+  run('wget http://people.apache.org/keys/group/lucene.asc')
+  os.rename('lucene.asc', 'KEYS')
+  run('chmod a+r-w KEYS')
+  run('cp KEYS ../lucene')
+
+  print ' chmod...'
+  os.chdir('..')
+  run('chmod -R a+rX-w .')
+
+  print ' done!'
+  return 'file://%s/%s' % (os.path.abspath(localDir), dir)
 
 def main():
   doPrepare = '-prepare' in sys.argv
   if doPrepare:
     sys.argv.remove('-prepare')
-  doPush = '-push' in sys.argv
-  if doPush:
-    sys.argv.remove('-push')
+
+  try:
+    idx = sys.argv.index('-push')
+  except ValueError:
+    doPushRemote = False
+  else:
+    doPushRemote = True
+    username = sys.argv[idx+1]
+    del sys.argv[idx:idx+2]
+
+  try:
+    idx = sys.argv.index('-smoke')
+  except ValueError:
+    smokeTmpDir = None
+  else:
+    smokeTmpDir = sys.argv[idx+1]
+    del sys.argv[idx:idx+2]
+    if os.path.exists(smokeTmpDir):
+      print
+      print 'ERROR: smoke tmpDir "%s" exists; please remove first' % smokeTmpDir
+      print
+      sys.exit(1)
+
+  try:
+    idx = sys.argv.index('-pushLocal')
+  except ValueError:
+    doPushLocal = False
+  else:
+    doPushLocal = True
+    localStagingDir = sys.argv[idx+1]
+    del sys.argv[idx:idx+2]
+    if os.path.exists(localStagingDir):
+      print
+      print 'ERROR: pushLocal dir "%s" exists; please remove first' % localStagingDir
+      print
+      sys.exit(1)
+
+  if doPushRemote and doPushLocal:
+    print
+    print 'ERROR: specify at most one of -push or -pushLocal (got both)'
+    print
+    sys.exit(1)
+
+  try:
+    idx = sys.argv.index('-sign')
+  except ValueError:
+    gpgKeyID = None
+  else:
+    gpgKeyID = sys.argv[idx+1]
+    del sys.argv[idx:idx+2]
 
   root = os.path.abspath(sys.argv[1])
   version = sys.argv[2]
-  gpgKeyID = sys.argv[3]
-  rcNum = int(sys.argv[4])
-  username = sys.argv[5]
+  rcNum = int(sys.argv[3])
 
   if doPrepare:
-    rev = prepare(root, version, gpgKeyID)
+    rev = prepare(root, version, gpgKeyID, smokeTmpDir is None)
   else:
     os.chdir(root)
     rev = open('rev.txt').read()
 
-  if doPush:
-    push(version, root, rev, rcNum, username)
+  if doPushRemote:
+    url = push(version, root, rev, rcNum, username)
+  elif doPushLocal:
+    url = pushLocal(version, root, rev, rcNum, localStagingDir)
+  else:
+    url = None
+
+  if url is not None:
+    print ' URL: %s' % url
+
+  if smokeTmpDir is not None:
+    import smokeTestRelease
+    smokeTestRelease.DEBUG = False
+    smokeTestRelease.smokeTest(url, version, smokeTmpDir, gpgKeyID is not None)
 
 if __name__ == '__main__':
   main()
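The new option handling in main() repeats one idiom for -push, -smoke, -pushLocal and -sign: locate the flag with sys.argv.index(), take the value that follows it, then delete both entries. A minimal sketch of that idiom factored into a helper (illustration only; the patch keeps the inline form):

  def popFlagValue(argv, flag):
    # Returns the value following flag and removes both from argv,
    # or returns None if the flag is absent.
    try:
      idx = argv.index(flag)
    except ValueError:
      return None
    value = argv[idx+1]
    del argv[idx:idx+2]
    return value

  # e.g.: gpgKeyID = popFlagValue(sys.argv, '-sign')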
@@ -36,6 +36,16 @@ import checkJavaDocs
 # must have a working gpg, tar, unzip in your path. This has been
 # tested on Linux and on Cygwin under Windows 7.
 
+def unshortenURL(url):
+  parsed = urlparse.urlparse(url)
+  if parsed[0] in ('http', 'https'):
+    h = httplib.HTTPConnection(parsed.netloc)
+    h.request('HEAD', parsed.path)
+    response = h.getresponse()
+    if response.status/100 == 3 and response.getheader('Location'):
+      return response.getheader('Location')
+  return url
+
 def javaExe(version):
   if version == '1.5':
     path = JAVA5_HOME
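unshortenURL() issues a single HEAD request and, if the server answers with a 3xx status and a Location header, returns that target; anything else (including file:// URLs) passes through unchanged. A hedged usage sketch, assuming the definition above and a hypothetical shortened URL:

  import httplib, urlparse  # required by unshortenURL() above

  print unshortenURL('http://s.apache.org/some-rc')  # placeholder short URL; prints the expanded location
  print unshortenURL('file:///tmp/rc-stage/')        # non-http scheme: returned as-is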
@@ -143,7 +153,7 @@ def download(name, urlString, tmpDir, quiet=False):
 def load(urlString):
   return urllib2.urlopen(urlString).read()
 
-def checkSigs(project, urlString, version, tmpDir):
+def checkSigs(project, urlString, version, tmpDir, isSigned):
 
   print ' test basics...'
   ents = getDirEntries(urlString)
@@ -151,7 +161,11 @@ def checkSigs(project, urlString, version, tmpDir):
   keysURL = None
   changesURL = None
   mavenURL = None
-  expectedSigs = ['asc', 'md5', 'sha1']
+  expectedSigs = []
+  if isSigned:
+    expectedSigs.append('asc')
+  expectedSigs.extend(['md5', 'sha1'])
 
   artifacts = []
   for text, subURL in ents:
     if text == 'KEYS':
@@ -239,34 +253,35 @@ def checkSigs(project, urlString, version, tmpDir):
     download(artifact, urlString, tmpDir)
     verifyDigests(artifact, urlString, tmpDir)
 
-    print ' verify sig'
-    # Test sig (this is done with a clean brand-new GPG world)
-    download(artifact + '.asc', urlString + '.asc', tmpDir)
-    sigFile = '%s/%s.asc' % (tmpDir, artifact)
-    artifactFile = '%s/%s' % (tmpDir, artifact)
-    logFile = '%s/%s.%s.gpg.verify.log' % (tmpDir, project, artifact)
-    run('gpg --homedir %s --verify %s %s' % (gpgHomeDir, sigFile, artifactFile),
-        logFile)
-    # Forward any GPG warnings, except the expected one (since its a clean world)
-    f = open(logFile, 'rb')
-    for line in f.readlines():
-      if line.lower().find('warning') != -1 \
-         and line.find('WARNING: This key is not certified with a trusted signature') == -1:
-        print ' GPG: %s' % line.strip()
-    f.close()
+    if isSigned:
+      print ' verify sig'
+      # Test sig (this is done with a clean brand-new GPG world)
+      download(artifact + '.asc', urlString + '.asc', tmpDir)
+      sigFile = '%s/%s.asc' % (tmpDir, artifact)
+      artifactFile = '%s/%s' % (tmpDir, artifact)
+      logFile = '%s/%s.%s.gpg.verify.log' % (tmpDir, project, artifact)
+      run('gpg --homedir %s --verify %s %s' % (gpgHomeDir, sigFile, artifactFile),
+          logFile)
+      # Forward any GPG warnings, except the expected one (since its a clean world)
+      f = open(logFile, 'rb')
+      for line in f.readlines():
+        if line.lower().find('warning') != -1 \
+           and line.find('WARNING: This key is not certified with a trusted signature') == -1:
+          print ' GPG: %s' % line.strip()
+      f.close()
 
-    # Test trust (this is done with the real users config)
-    run('gpg --import %s' % (keysFile),
-        '%s/%s.gpg.trust.import.log 2>&1' % (tmpDir, project))
-    print ' verify trust'
-    logFile = '%s/%s.%s.gpg.trust.log' % (tmpDir, project, artifact)
-    run('gpg --verify %s %s' % (sigFile, artifactFile), logFile)
-    # Forward any GPG warnings:
-    f = open(logFile, 'rb')
-    for line in f.readlines():
-      if line.lower().find('warning') != -1:
-        print ' GPG: %s' % line.strip()
-    f.close()
+      # Test trust (this is done with the real users config)
+      run('gpg --import %s' % (keysFile),
+          '%s/%s.gpg.trust.import.log 2>&1' % (tmpDir, project))
+      print ' verify trust'
+      logFile = '%s/%s.%s.gpg.trust.log' % (tmpDir, project, artifact)
+      run('gpg --verify %s %s' % (sigFile, artifactFile), logFile)
+      # Forward any GPG warnings:
+      f = open(logFile, 'rb')
+      for line in f.readlines():
+        if line.lower().find('warning') != -1:
+          print ' GPG: %s' % line.strip()
+      f.close()
 
 def testChanges(project, version, changesURLString):
   print ' check changes HTML...'
@@ -370,10 +385,24 @@ def verifyDigests(artifact, urlString, tmpDir):
     raise RuntimeError('SHA1 digest mismatch for %s: expected %s but got %s' % (artifact, sha1Expected, sha1Actual))
 
 def getDirEntries(urlString):
-  links = getHREFs(urlString)
-  for i, (text, subURL) in enumerate(links):
-    if text == 'Parent Directory' or text == '..':
-      return links[(i+1):]
+  if urlString.startswith('file://'):
+    path = urlString[7:]
+    if path.endswith('/'):
+      path = path[:-1]
+    l = []
+    for ent in os.listdir(path):
+      entPath = '%s/%s' % (path, ent)
+      if os.path.isdir(entPath):
+        entPath += '/'
+        ent += '/'
+      l.append((ent, 'file://%s' % entPath))
+    l.sort()
+    return l
+  else:
+    links = getHREFs(urlString)
+    for i, (text, subURL) in enumerate(links):
+      if text == 'Parent Directory' or text == '..':
+        return links[(i+1):]
 
 def unpack(project, tmpDir, artifact, version):
   destDir = '%s/unpack' % tmpDir
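With the file:// branch, getDirEntries() treats a locally staged RC like an HTTP directory listing: it returns sorted (name, URL) pairs and marks directories with a trailing slash. A hedged sketch of the expected output, assuming a hypothetical staging layout:

  # Suppose file:///tmp/rc-stage/lucene-solr-3.4.0-RC0-rev1234/ holds
  # KEYS plus lucene/ and solr/ sub-directories:
  for text, subURL in getDirEntries('file:///tmp/rc-stage/lucene-solr-3.4.0-RC0-rev1234/'):
    print '%s -> %s' % (text, subURL)
  # KEYS -> file:///tmp/rc-stage/lucene-solr-3.4.0-RC0-rev1234/KEYS
  # lucene/ -> file:///tmp/rc-stage/lucene-solr-3.4.0-RC0-rev1234/lucene/
  # solr/ -> file:///tmp/rc-stage/lucene-solr-3.4.0-RC0-rev1234/solr/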
@@ -642,7 +671,7 @@ def testDemo(isSrc, version):
     raise RuntimeError('lucene demo\'s SearchFiles found too few results: %s' % numHits)
   print ' got %d hits for query "lucene"' % numHits
 
-def checkMaven(baseURL, tmpDir, version):
+def checkMaven(baseURL, tmpDir, version, isSigned):
   # Locate the release branch in subversion
   m = re.match('(\d+)\.(\d+)', version) # Get Major.minor version components
   releaseBranchText = 'lucene_solr_%s_%s/' % (m.group(1), m.group(2))
@@ -680,8 +709,9 @@ def checkMaven(baseURL, tmpDir, version):
   checkJavadocAndSourceArtifacts(nonMavenizedDeps, artifacts, version)
   print " verify deployed POMs' coordinates..."
   verifyDeployedPOMsCoordinates(artifacts, version)
-  print ' verify maven artifact sigs',
-  verifyMavenSigs(baseURL, tmpDir, artifacts)
+  if isSigned:
+    print ' verify maven artifact sigs',
+    verifyMavenSigs(baseURL, tmpDir, artifacts)
 
   distributionFiles = getDistributionsForMavenChecks(tmpDir, version, baseURL)
 
@@ -1004,7 +1034,7 @@ def getPOMtemplates(POMtemplates, tmpDir, releaseBranchSvnURL):
   if POMtemplates['solr'] is None:
     raise RuntimeError('No Solr POMs found at %s' % sourceLocation)
   POMtemplates['grandfather'] = [p for p in allPOMtemplates if '/maven/pom.xml.template' in p]
-  if POMtemplates['grandfather'] is None:
+  if len(POMtemplates['grandfather']) == 0:
     raise RuntimeError('No Lucene/Solr grandfather POM found at %s' % sourceLocation)
 
 def crawl(downloadedFiles, urlString, targetDir, exclusions=set()):
@@ -1033,6 +1063,10 @@ def main():
   version = sys.argv[2]
   tmpDir = os.path.abspath(sys.argv[3])
 
+  smokeTest(baseURL, version, tmpDir, True)
+
+def smokeTest(baseURL, version, tmpDir, isSigned):
+
   if not DEBUG:
     if os.path.exists(tmpDir):
       raise RuntimeError('temp dir %s exists; please remove first' % tmpDir)
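Splitting smokeTest() out of main() is what lets buildRelease.py import the module and drive the smoke test directly, as its new -smoke option does. A minimal sketch of that programmatic call; the URL, version and tmp dir are placeholders:

  import smokeTestRelease

  smokeTestRelease.DEBUG = False
  # isSigned=False exercises the new unsigned-artifact checks end to end.
  smokeTestRelease.smokeTest('file:///tmp/rc-stage/lucene-solr-3.4.0-RC0-rev1234',
                             '3.4.0', '/tmp/rc-smoke', False)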
@@ -1042,7 +1076,13 @@ def main():
 
   lucenePath = None
   solrPath = None
-  print 'Load release URL...'
+  print
+  print 'Load release URL "%s"...' % baseURL
+  newBaseURL = unshortenURL(baseURL)
+  if newBaseURL != baseURL:
+    print ' unshortened: %s' % newBaseURL
+    baseURL = newBaseURL
 
   for text, subURL in getDirEntries(baseURL):
     if text.lower().find('lucene') != -1:
       lucenePath = subURL
@@ -1056,20 +1096,20 @@ def main():
 
   print
   print 'Test Lucene...'
-  checkSigs('lucene', lucenePath, version, tmpDir)
+  checkSigs('lucene', lucenePath, version, tmpDir, isSigned)
   for artifact in ('lucene-%s.tgz' % version, 'lucene-%s.zip' % version):
     unpack('lucene', tmpDir, artifact, version)
   unpack('lucene', tmpDir, 'lucene-%s-src.tgz' % version, version)
 
   print
   print 'Test Solr...'
-  checkSigs('solr', solrPath, version, tmpDir)
+  checkSigs('solr', solrPath, version, tmpDir, isSigned)
   for artifact in ('apache-solr-%s.tgz' % version, 'apache-solr-%s.zip' % version):
     unpack('solr', tmpDir, artifact, version)
   unpack('solr', tmpDir, 'apache-solr-%s-src.tgz' % version, version)
 
   print 'Test Maven artifacts for Lucene and Solr...'
-  checkMaven(baseURL, tmpDir, version)
+  checkMaven(baseURL, tmpDir, version, isSigned)
 
 if __name__ == '__main__':
   main()
@@ -452,7 +452,8 @@
     <copy-to-stage-macro artifacts.dir="${dist.dir}"/>
   </target>
 
-  <target name="prepare-release" depends="clean, dist-all, generate-maven-artifacts, sign-artifacts"/>
+  <target name="prepare-release-no-sign" depends="clean, dist-all, generate-maven-artifacts"/>
+  <target name="prepare-release" depends="prepare-release-no-sign, sign-artifacts"/>
   <target name="stage" depends="prepare-release, copy-to-stage">
 
   </target>
@@ -330,8 +330,8 @@
     </ant>
   </target>
 
-  <target name="prepare-release"
-          depends="clean, package, generate-maven-artifacts, sign-artifacts"/>
+  <target name="prepare-release-no-sign" depends="clean, package, generate-maven-artifacts"/>
+  <target name="prepare-release" depends="prepare-release-no-sign, sign-artifacts"/>
 
   <!-- make a distribution -->
   <target name="package" depends="package-src-tgz,create-package"/>