initial cutover to python3 (this script no longer works with Python 2, and does not yet work with Python 3)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1369216 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2012-08-03 21:18:14 +00:00
parent 917fe18679
commit bc0a8e69d8
1 changed file with 92 additions and 92 deletions


@@ -20,12 +20,12 @@ import subprocess
 import signal
 import shutil
 import hashlib
-import httplib
+import http.client
 import re
-import urllib2
-import urlparse
+import urllib.request, urllib.error, urllib.parse
+import urllib.parse
 import sys
-import HTMLParser
+import html.parser
 from collections import defaultdict
 import xml.etree.ElementTree as ET
 import filecmp
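
The Python 2 modules dropped above were renamed rather than removed; a minimal sketch (not part of the patch, using an archive.apache.org URL purely as an example) showing the Python 3 equivalents in use:

  # Sketch: Python 3 names for the old httplib/urllib2/urlparse/HTMLParser modules.
  import http.client       # was httplib
  import urllib.request    # was urllib2 (urlopen, Request, ...)
  import urllib.parse      # was urlparse (plus urllib's quote/unquote helpers)
  import html.parser       # was HTMLParser

  parsed = urllib.parse.urlparse('http://archive.apache.org/dist/lucene/')
  conn = http.client.HTTPConnection(parsed.netloc)
  conn.request('HEAD', parsed.path)
  print(conn.getresponse().status)
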
@@ -38,9 +38,9 @@ import checkJavadocLinks
 # tested on Linux and on Cygwin under Windows 7.

 def unshortenURL(url):
-  parsed = urlparse.urlparse(url)
+  parsed = urllib.parse.urlparse(url)
   if parsed[0] in ('http', 'https'):
-    h = httplib.HTTPConnection(parsed.netloc)
+    h = http.client.HTTPConnection(parsed.netloc)
     h.request('HEAD', parsed.path)
     response = h.getresponse()
     if response.status/100 == 3 and response.getheader('Location'):
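
One Python 3 pitfall this hunk leaves in place (consistent with the commit message saying the script does not work yet): / is now true division, so response.status/100 == 3 compares a float such as 3.01 against 3 and never detects a 3xx redirect. Floor division restores the Python 2 behaviour; a minimal sketch:

  # Sketch: integer vs. true division when classifying an HTTP status code.
  status = 301
  assert status / 100 != 3    # Python 3 true division gives 3.01
  assert status // 100 == 3   # floor division matches the old Python 2 result
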
@@ -101,8 +101,8 @@ def getHREFs(urlString):

   # Deref any redirects
   while True:
-    url = urlparse.urlparse(urlString)
-    h = httplib.HTTPConnection(url.netloc)
+    url = urllib.parse.urlparse(urlString)
+    h = http.client.HTTPConnection(url.netloc)
     h.request('GET', url.path)
     r = h.getresponse()
     newLoc = r.getheader('location')
@@ -112,8 +112,8 @@ def getHREFs(urlString):
       break

   links = []
-  for subUrl, text in reHREF.findall(urllib2.urlopen(urlString).read()):
-    fullURL = urlparse.urljoin(urlString, subUrl)
+  for subUrl, text in reHREF.findall(urllib.request.urlopen(urlString).read()):
+    fullURL = urllib.parse.urljoin(urlString, subUrl)
     links.append((text, fullURL))

   return links
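
Another latent issue here: in Python 3, urllib.request.urlopen(...).read() returns bytes, and applying a str-compiled regex such as reHREF to bytes raises TypeError. Decoding the page first is one way to keep a str regex working; a minimal sketch (the pattern and URL are stand-ins, not taken from the script):

  import re
  import urllib.request

  reHREF = re.compile('<a href="(.*?)">(.*?)</a>')     # str pattern, stand-in for the script's

  f = urllib.request.urlopen('http://archive.apache.org/dist/lucene/')
  page = f.read().decode('utf-8', errors='replace')    # bytes -> str before regex search
  f.close()
  links = reHREF.findall(page)
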
@@ -121,15 +121,15 @@ def download(name, urlString, tmpDir, quiet=False):
   fileName = '%s/%s' % (tmpDir, name)
   if DEBUG and os.path.exists(fileName):
     if not quiet and fileName.find('.asc') == -1:
-      print ' already done: %.1f MB' % (os.path.getsize(fileName)/1024./1024.)
+      print(' already done: %.1f MB' % (os.path.getsize(fileName)/1024./1024.))
     return
-  fIn = urllib2.urlopen(urlString)
+  fIn = urllib.request.urlopen(urlString)
   fOut = open(fileName, 'wb')
   success = False
   try:
     while True:
       s = fIn.read(65536)
-      if s == '':
+      if s == b'':
         break
       fOut.write(s)
     fOut.close()
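
The b'' comparison above is the substantive fix in this hunk: the response is read as bytes in Python 3, so the old comparison against '' would never be true and the loop would never terminate. Testing for emptiness directly also works; a minimal sketch of the same copy loop (the URL and path are placeholders):

  # Sketch: stream a URL to disk in 64 KB chunks under Python 3.
  import urllib.request

  fIn = urllib.request.urlopen('http://example.com/some-file')
  fOut = open('/tmp/some-file', 'wb')
  while True:
    s = fIn.read(65536)
    if not s:        # an empty bytes object signals end of stream
      break
    fOut.write(s)
  fOut.close()
  fIn.close()
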
@ -141,14 +141,14 @@ def download(name, urlString, tmpDir, quiet=False):
if not success: if not success:
os.remove(fileName) os.remove(fileName)
if not quiet and fileName.find('.asc') == -1: if not quiet and fileName.find('.asc') == -1:
print ' %.1f MB' % (os.path.getsize(fileName)/1024./1024.) print(' %.1f MB' % (os.path.getsize(fileName)/1024./1024.))
def load(urlString): def load(urlString):
return urllib2.urlopen(urlString).read() return urllib.request.urlopen(urlString).read()
def checkSigs(project, urlString, version, tmpDir, isSigned): def checkSigs(project, urlString, version, tmpDir, isSigned):
print ' test basics...' print(' test basics...')
ents = getDirEntries(urlString) ents = getDirEntries(urlString)
artifact = None artifact = None
keysURL = None keysURL = None
@@ -210,7 +210,7 @@ def checkSigs(project, urlString, version, tmpDir, isSigned):
   if keysURL is None:
     raise RuntimeError('%s is missing KEYS' % project)

-  print ' get KEYS'
+  print(' get KEYS')
   download('%s.KEYS' % project, keysURL, tmpDir)
   keysFile = '%s/%s.KEYS' % (tmpDir, project)
@@ -219,7 +219,7 @@ def checkSigs(project, urlString, version, tmpDir, isSigned):
   gpgHomeDir = '%s/%s.gpg' % (tmpDir, project)
   if os.path.exists(gpgHomeDir):
     shutil.rmtree(gpgHomeDir)
-  os.makedirs(gpgHomeDir, 0700)
+  os.makedirs(gpgHomeDir, 0o700)
   run('gpg --homedir %s --import %s' % (gpgHomeDir, keysFile),
       '%s/%s.gpg.import.log 2>&1' % (tmpDir, project))
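
The 0o700 change is required rather than cosmetic: a 0-prefixed octal literal like 0700 is a SyntaxError in Python 3, which uses the 0o prefix. A minimal sketch:

  # Sketch: Python 3 octal literals and the resulting permission bits.
  import os, tempfile
  gpgHomeDir = tempfile.mkdtemp()
  os.chmod(gpgHomeDir, 0o700)   # rwx for the owner only; writing 0700 would not even parse
  assert 0o700 == 448
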
@@ -232,12 +232,12 @@ def checkSigs(project, urlString, version, tmpDir, isSigned):
   testChanges(project, version, changesURL)

   for artifact, urlString in artifacts:
-    print ' download %s...' % artifact
+    print(' download %s...' % artifact)
     download(artifact, urlString, tmpDir)
     verifyDigests(artifact, urlString, tmpDir)

     if isSigned:
-      print ' verify sig'
+      print(' verify sig')
       # Test sig (this is done with a clean brand-new GPG world)
       download(artifact + '.asc', urlString + '.asc', tmpDir)
       sigFile = '%s/%s.asc' % (tmpDir, artifact)
@@ -250,24 +250,24 @@ def checkSigs(project, urlString, version, tmpDir, isSigned):
       for line in f.readlines():
         if line.lower().find('warning') != -1 \
            and line.find('WARNING: This key is not certified with a trusted signature') == -1:
-          print ' GPG: %s' % line.strip()
+          print(' GPG: %s' % line.strip())
       f.close()

       # Test trust (this is done with the real users config)
       run('gpg --import %s' % (keysFile),
           '%s/%s.gpg.trust.import.log 2>&1' % (tmpDir, project))
-      print ' verify trust'
+      print(' verify trust')
       logFile = '%s/%s.%s.gpg.trust.log' % (tmpDir, project, artifact)
       run('gpg --verify %s %s' % (sigFile, artifactFile), logFile)
       # Forward any GPG warnings:
       f = open(logFile, 'rb')
       for line in f.readlines():
         if line.lower().find('warning') != -1:
-          print ' GPG: %s' % line.strip()
+          print(' GPG: %s' % line.strip())
       f.close()

 def testChanges(project, version, changesURLString):
-  print ' check changes HTML...'
+  print(' check changes HTML...')
   changesURL = None
   for text, subURL in getDirEntries(changesURLString):
     if text == 'Changes.html':
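
A further bytes-versus-str wrinkle in this hunk: the GPG log is opened with 'rb', so each line is a bytes object, and line.lower().find('warning') raises TypeError under Python 3 because a str needle cannot be searched in bytes. Either a bytes literal or an up-front decode would be needed; a minimal sketch:

  # Sketch: searching inside bytes requires a bytes needle (or decode to str first).
  line = b'gpg: WARNING: This key is not certified with a trusted signature!\n'
  assert line.lower().find(b'warning') != -1                    # bytes needle
  assert 'warning' in line.decode('utf-8', 'replace').lower()   # or decode, then search as str
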
@@ -336,7 +336,7 @@ def run(command, logFile):
     raise RuntimeError('command "%s" failed; see log file %s' % (command, logPath))

 def verifyDigests(artifact, urlString, tmpDir):
-  print ' verify md5/sha1 digests'
+  print(' verify md5/sha1 digests')
   md5Expected, t = load(urlString + '.md5').strip().split()
   if t != '*'+artifact:
     raise RuntimeError('MD5 %s.md5 lists artifact %s but expected *%s' % (urlString, t, artifact))
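
In the same vein, load() now returns bytes, so md5Expected and t come back as bytes while '*'+artifact is a str; under Python 3 the comparison above is then always unequal and every artifact would be flagged. Decoding the digest text before splitting keeps the str comparison intact; a minimal sketch (the digest contents are hypothetical, shaped the way this check expects them):

  # Sketch: decode the downloaded .md5 text before comparing against str values.
  raw = b'0123456789abcdef0123456789abcdef *lucene-4.0.0.tgz\n'  # hypothetical contents
  md5Expected, t = raw.decode('utf-8').strip().split()
  artifact = 'lucene-4.0.0.tgz'
  assert t == '*' + artifact
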
@@ -388,7 +388,7 @@ def unpack(project, tmpDir, artifact, version):
     shutil.rmtree(destDir)
   os.makedirs(destDir)
   os.chdir(destDir)
-  print ' unpack %s...' % artifact
+  print(' unpack %s...' % artifact)
   unpackLogFile = '%s/%s-unpack-%s.log' % (tmpDir, project, artifact)
   if artifact.endswith('.tar.gz') or artifact.endswith('.tgz'):
     run('tar xzf %s/%s' % (tmpDir, artifact), unpackLogFile)
@@ -453,76 +453,76 @@ def verifyUnpacked(project, artifact, unpackPath, version, tmpDir):
     raise RuntimeError('%s: unexpected files/dirs in artifact %s: %s' % (project, artifact, l))

   if isSrc:
-    print ' make sure no JARs/WARs in src dist...'
+    print(' make sure no JARs/WARs in src dist...')
     lines = os.popen('find . -name \\*.jar').readlines()
     if len(lines) != 0:
-      print ' FAILED:'
+      print(' FAILED:')
       for line in lines:
-        print ' %s' % line.strip()
+        print(' %s' % line.strip())
       raise RuntimeError('source release has JARs...')
     lines = os.popen('find . -name \\*.war').readlines()
     if len(lines) != 0:
-      print ' FAILED:'
+      print(' FAILED:')
       for line in lines:
-        print ' %s' % line.strip()
+        print(' %s' % line.strip())
       raise RuntimeError('source release has WARs...')

-    print ' run "ant validate"'
+    print(' run "ant validate"')
     run('%s; ant validate' % javaExe('1.7'), '%s/validate.log' % unpackPath)

     if project == 'lucene':
-      print ' run tests w/ Java 6...'
+      print(' run tests w/ Java 6...')
       run('%s; ant test' % javaExe('1.6'), '%s/test.log' % unpackPath)
       run('%s; ant jar' % javaExe('1.6'), '%s/compile.log' % unpackPath)
       testDemo(isSrc, version)
       # test javadocs
-      print ' generate javadocs w/ Java 6...'
+      print(' generate javadocs w/ Java 6...')
       run('%s; ant javadocs' % javaExe('1.6'), '%s/javadocs.log' % unpackPath)
       checkJavadocpath('%s/build/docs' % unpackPath)
     else:
-      print ' run tests w/ Java 6...'
+      print(' run tests w/ Java 6...')
       run('%s; ant test' % javaExe('1.6'), '%s/test.log' % unpackPath)
       # test javadocs
-      print ' generate javadocs w/ Java 6...'
+      print(' generate javadocs w/ Java 6...')
       run('%s; ant javadocs' % javaExe('1.6'), '%s/javadocs.log' % unpackPath)
       checkJavadocpath('%s/build/docs' % unpackPath)

-      print ' run tests w/ Java 7...'
+      print(' run tests w/ Java 7...')
       run('%s; ant test' % javaExe('1.7'), '%s/test.log' % unpackPath)
       # test javadocs
-      print ' generate javadocs w/ Java 7...'
+      print(' generate javadocs w/ Java 7...')
       run('%s; ant javadocs' % javaExe('1.7'), '%s/javadocs.log' % unpackPath)
       checkJavadocpath('%s/build/docs' % unpackPath)

       os.chdir('solr')
-      print ' test solr example w/ Java 6...'
+      print(' test solr example w/ Java 6...')
       run('%s; ant clean example' % javaExe('1.6'), '%s/antexample.log' % unpackPath)
       testSolrExample(unpackPath, JAVA6_HOME, True)

-      print ' test solr example w/ Java 7...'
+      print(' test solr example w/ Java 7...')
       run('%s; ant clean example' % javaExe('1.7'), '%s/antexample.log' % unpackPath)
       testSolrExample(unpackPath, JAVA7_HOME, True)
       os.chdir('..')

-      print ' check NOTICE'
+      print(' check NOTICE')
       testNotice(unpackPath)

   else:
     if project == 'lucene':
       testDemo(isSrc, version)
     else:
-      print ' test solr example w/ Java 6...'
+      print(' test solr example w/ Java 6...')
       testSolrExample(unpackPath, JAVA6_HOME, False)
-      print ' test solr example w/ Java 7...'
+      print(' test solr example w/ Java 7...')
       testSolrExample(unpackPath, JAVA7_HOME, False)

   testChangesText('.', version, project)

   if project == 'lucene' and not isSrc:
-    print ' check Lucene\'s javadoc JAR'
+    print(' check Lucene\'s javadoc JAR')
     checkJavadocpath('%s/docs' % unpackPath)

 def testNotice(unpackPath):
@@ -558,7 +558,7 @@ def readSolrOutput(p, startupEvent, logFile):
 def testSolrExample(unpackPath, javaPath, isSrc):
   logFile = '%s/solr-example.log' % unpackPath
   os.chdir('example')
-  print ' start Solr instance (log=%s)...' % logFile
+  print(' start Solr instance (log=%s)...' % logFile)
   env = {}
   env.update(os.environ)
   env['JAVA_HOME'] = javaPath
@@ -572,21 +572,21 @@ def testSolrExample(unpackPath, javaPath, isSrc):
   # Make sure Solr finishes startup:
   startupEvent.wait()
-  print ' startup done'
+  print(' startup done')

   try:
-    print ' test utf8...'
+    print(' test utf8...')
     run('sh ./exampledocs/test_utf8.sh', 'utf8.log')
-    print ' index example docs...'
+    print(' index example docs...')
     run('sh ./exampledocs/post.sh ./exampledocs/*.xml', 'post-example-docs.log')
-    print ' run query...'
-    s = urllib2.urlopen('http://localhost:8983/solr/select/?q=video').read()
+    print(' run query...')
+    s = urllib.request.urlopen('http://localhost:8983/solr/select/?q=video').read()
     if s.find('<result name="response" numFound="3" start="0">') == -1:
-      print 'FAILED: response is:\n%s' % s
+      print('FAILED: response is:\n%s' % s)
       raise RuntimeError('query on solr example instance failed')
   finally:
     # Stop server:
-    print ' stop server (SIGINT)...'
+    print(' stop server (SIGINT)...')
     os.kill(server.pid, signal.SIGINT)

     # Give it 10 seconds to gracefully shut down
@@ -594,14 +594,14 @@ def testSolrExample(unpackPath, javaPath, isSrc):
     if serverThread.isAlive():
       # Kill server:
-      print '***WARNING***: Solr instance didn\'t respond to SIGINT; using SIGKILL now...'
+      print('***WARNING***: Solr instance didn\'t respond to SIGINT; using SIGKILL now...')
       os.kill(server.pid, signal.SIGKILL)

       serverThread.join(10.0)

       if serverThread.isAlive():
         # Shouldn't happen unless something is seriously wrong...
-        print '***WARNING***: Solr instance didn\'t respond to SIGKILL; ignoring...'
+        print('***WARNING***: Solr instance didn\'t respond to SIGKILL; ignoring...')

   os.chdir('..')
@@ -615,13 +615,13 @@ def checkJavadocpath(path):
   if checkJavaDocs.checkPackageSummaries(path):
     # disabled: RM cannot fix all this, see LUCENE-3887
     # raise RuntimeError('javadoc problems')
-    print '\n***WARNING***: javadocs want to fail!\n'
+    print('\n***WARNING***: javadocs want to fail!\n')

   if checkJavadocLinks.checkAll(path):
     raise RuntimeError('broken javadocs links found!')

 def testDemo(isSrc, version):
-  print ' test demo...'
+  print(' test demo...')
   sep = ';' if cygwin else ':'
   if isSrc:
     cp = 'build/core/classes/java{0}build/demo/classes/java{0}build/analysis/common/classes/java{0}build/queryparser/classes/java'.format(sep)
@@ -639,7 +639,7 @@ def testDemo(isSrc, version):
   numHits = int(m.group(1))
   if numHits < 100:
     raise RuntimeError('lucene demo\'s SearchFiles found too few results: %s' % numHits)
-  print ' got %d hits for query "lucene"' % numHits
+  print(' got %d hits for query "lucene"' % numHits)

 def checkMaven(baseURL, tmpDir, version, isSigned):
   # Locate the release branch in subversion
@@ -652,11 +652,11 @@ def checkMaven(baseURL, tmpDir, version, isSigned):
     if text == releaseBranchText:
       releaseBranchSvnURL = subURL

-  print ' get POM templates',
+  print(' get POM templates', end=' ')
   POMtemplates = defaultdict()
   getPOMtemplates(POMtemplates, tmpDir, releaseBranchSvnURL)
-  print
-  print ' download artifacts',
+  print()
+  print(' download artifacts', end=' ')
   artifacts = {'lucene': [], 'solr': []}
   for project in ('lucene', 'solr'):
     artifactsURL = '%s/%s/maven/org/apache/%s' % (baseURL, project, project)
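
The end=' ' keyword added here is the usual replacement for Python 2's trailing comma, which suppressed the newline so that progress dots could be appended to the same line before a bare print() finally terminates it. A minimal sketch of that pattern:

  # Sketch: print without a newline, append progress dots, then end the line.
  import sys
  print(' download artifacts', end=' ')
  for _ in range(3):
    sys.stdout.write('.')   # dots emitted while artifacts are crawled
  print()                   # newline that closes the progress line
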
@@ -664,30 +664,30 @@ def checkMaven(baseURL, tmpDir, version, isSigned):
     if not os.path.exists(targetDir):
       os.makedirs(targetDir)
     crawl(artifacts[project], artifactsURL, targetDir)
-    print
+    print()

-  print ' verify that each binary artifact has a deployed POM...'
+  print(' verify that each binary artifact has a deployed POM...')
   verifyPOMperBinaryArtifact(artifacts, version)
-  print ' verify that there is an artifact for each POM template...'
+  print(' verify that there is an artifact for each POM template...')
   verifyArtifactPerPOMtemplate(POMtemplates, artifacts, tmpDir, version)
-  print " verify Maven artifacts' md5/sha1 digests..."
+  print(" verify Maven artifacts' md5/sha1 digests...")
   verifyMavenDigests(artifacts)
-  print ' verify that all non-Mavenized deps are deployed...'
+  print(' verify that all non-Mavenized deps are deployed...')
   nonMavenizedDeps = dict()
   checkNonMavenizedDeps(nonMavenizedDeps, POMtemplates, artifacts, tmpDir,
                         version, releaseBranchSvnURL)
-  print ' check for javadoc and sources artifacts...'
+  print(' check for javadoc and sources artifacts...')
   checkJavadocAndSourceArtifacts(nonMavenizedDeps, artifacts, version)
-  print " verify deployed POMs' coordinates..."
+  print(" verify deployed POMs' coordinates...")
   verifyDeployedPOMsCoordinates(artifacts, version)

   if isSigned:
-    print ' verify maven artifact sigs',
+    print(' verify maven artifact sigs', end=' ')
     verifyMavenSigs(baseURL, tmpDir, artifacts)

   distributionFiles = getDistributionsForMavenChecks(tmpDir, version, baseURL)
-  print ' verify that non-Mavenized deps are same as in the binary distribution...'
+  print(' verify that non-Mavenized deps are same as in the binary distribution...')
   checkIdenticalNonMavenizedDeps(distributionFiles, nonMavenizedDeps)
-  print ' verify that Maven artifacts are same as in the binary distribution...'
+  print(' verify that Maven artifacts are same as in the binary distribution...')
   checkIdenticalMavenArtifacts(distributionFiles, nonMavenizedDeps, artifacts, version)

 def getDistributionsForMavenChecks(tmpDir, version, baseURL):
@@ -697,19 +697,19 @@ def getDistributionsForMavenChecks(tmpDir, version, baseURL):
     if project == 'solr': distribution = 'apache-' + distribution
     if not os.path.exists('%s/%s' % (tmpDir, distribution)):
       distURL = '%s/%s/%s' % (baseURL, project, distribution)
-      print ' download %s...' % distribution,
+      print(' download %s...' % distribution, end=' ')
       download(distribution, distURL, tmpDir)
     destDir = '%s/unpack-%s-maven' % (tmpDir, project)
     if os.path.exists(destDir):
       shutil.rmtree(destDir)
     os.makedirs(destDir)
     os.chdir(destDir)
-    print ' unpack %s...' % distribution
+    print(' unpack %s...' % distribution)
     unpackLogFile = '%s/unpack-%s-maven-checks.log' % (tmpDir, distribution)
     run('tar xzf %s/%s' % (tmpDir, distribution), unpackLogFile)
     if project == 'solr': # unpack the Solr war
       unpackLogFile = '%s/unpack-solr-war-maven-checks.log' % tmpDir
-      print ' unpack Solr war...'
+      print(' unpack Solr war...')
       run('jar xvf */dist/*.war', unpackLogFile)
     distributionFiles[project] = []
     for root, dirs, files in os.walk(destDir):
@@ -719,7 +719,7 @@ def getDistributionsForMavenChecks(tmpDir, version, baseURL):
 def checkJavadocAndSourceArtifacts(nonMavenizedDeps, artifacts, version):
   for project in ('lucene', 'solr'):
     for artifact in artifacts[project]:
-      if artifact.endswith(version + '.jar') and artifact not in nonMavenizedDeps.keys():
+      if artifact.endswith(version + '.jar') and artifact not in list(nonMavenizedDeps.keys()):
         javadocJar = artifact[:-4] + '-javadoc.jar'
         if javadocJar not in artifacts[project]:
           raise RuntimeError('missing: %s' % javadocJar)
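
The list(...) wrappers added around dict.keys() in this and the following hunks are the conservative 2to3-style conversion; for pure membership tests like these they are not strictly necessary, since in Python 3 the in operator works directly on the dict and on its key view. A minimal sketch with hypothetical contents:

  # Sketch: membership tests need no list() around a Python 3 key view.
  nonMavenizedDeps = {'/lucene/foo-1.0.jar': '/path/to/foo-1.0.jar'}  # hypothetical
  artifact = '/lucene/foo-1.0.jar'
  assert artifact in nonMavenizedDeps
  assert artifact in nonMavenizedDeps.keys()
  assert artifact in list(nonMavenizedDeps.keys())  # what the patch generates; extra copy
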
@@ -732,7 +732,7 @@ def checkIdenticalNonMavenizedDeps(distributionFiles, nonMavenizedDeps):
     distFilenames = dict()
     for file in distributionFiles[project]:
       distFilenames[os.path.basename(file)] = file
-    for dep in nonMavenizedDeps.keys():
+    for dep in list(nonMavenizedDeps.keys()):
       if ('/%s/' % project) in dep:
         depOrigFilename = os.path.basename(nonMavenizedDeps[dep])
         if not depOrigFilename in distFilenames:
@@ -753,9 +753,9 @@ def checkIdenticalMavenArtifacts(distributionFiles, nonMavenizedDeps, artifacts,
       distFilenames[baseName] = file
     for artifact in artifacts[project]:
       if reJarWar.search(artifact):
-        if artifact not in nonMavenizedDeps.keys():
+        if artifact not in list(nonMavenizedDeps.keys()):
           artifactFilename = os.path.basename(artifact)
-          if artifactFilename not in distFilenames.keys():
+          if artifactFilename not in list(distFilenames.keys()):
             raise RuntimeError('Maven artifact %s is not present in %s binary distribution'
                                % (artifact, project))
           # TODO: Either fix the build to ensure that maven artifacts *are* identical, or recursively compare contents
@@ -891,7 +891,7 @@ def verifyMavenSigs(baseURL, tmpDir, artifacts):
     gpgHomeDir = '%s/%s.gpg' % (tmpDir, project)
     if os.path.exists(gpgHomeDir):
       shutil.rmtree(gpgHomeDir)
-    os.makedirs(gpgHomeDir, 0700)
+    os.makedirs(gpgHomeDir, 0o700)
     run('gpg --homedir %s --import %s' % (gpgHomeDir, keysFile),
         '%s/%s.gpg.import.log' % (tmpDir, project))
@@ -909,7 +909,7 @@ def verifyMavenSigs(baseURL, tmpDir, artifacts):
         if line.lower().find('warning') != -1 \
            and line.find('WARNING: This key is not certified with a trusted signature') == -1 \
            and line.find('WARNING: using insecure memory') == -1:
-          print ' GPG: %s' % line.strip()
+          print(' GPG: %s' % line.strip())
       f.close()

       # Test trust (this is done with the real users config)
@@ -923,11 +923,11 @@ def verifyMavenSigs(baseURL, tmpDir, artifacts):
         if line.lower().find('warning') != -1 \
            and line.find('WARNING: This key is not certified with a trusted signature') == -1 \
            and line.find('WARNING: using insecure memory') == -1:
-          print ' GPG: %s' % line.strip()
+          print(' GPG: %s' % line.strip())
       f.close()

       sys.stdout.write('.')
-  print
+  print()

 def verifyPOMperBinaryArtifact(artifacts, version):
   """verify that each binary jar and war has a corresponding POM file"""
@@ -1024,9 +1024,9 @@ def crawl(downloadedFiles, urlString, targetDir, exclusions=set()):

 def main():
   if len(sys.argv) != 4:
-    print
-    print 'Usage python -u %s BaseURL version tmpDir' % sys.argv[0]
-    print
+    print()
+    print('Usage python -u %s BaseURL version tmpDir' % sys.argv[0])
+    print()
     sys.exit(1)

   baseURL = sys.argv[1]
@@ -1046,11 +1046,11 @@ def smokeTest(baseURL, version, tmpDir, isSigned):
   lucenePath = None
   solrPath = None

-  print
-  print 'Load release URL "%s"...' % baseURL
+  print()
+  print('Load release URL "%s"...' % baseURL)
   newBaseURL = unshortenURL(baseURL)
   if newBaseURL != baseURL:
-    print ' unshortened: %s' % newBaseURL
+    print(' unshortened: %s' % newBaseURL)
     baseURL = newBaseURL

   for text, subURL in getDirEntries(baseURL):
@@ -1064,21 +1064,21 @@ def smokeTest(baseURL, version, tmpDir, isSigned):
   if solrPath is None:
     raise RuntimeError('could not find solr subdir')

-  print
-  print 'Test Lucene...'
+  print()
+  print('Test Lucene...')
   checkSigs('lucene', lucenePath, version, tmpDir, isSigned)
   for artifact in ('lucene-%s.tgz' % version, 'lucene-%s.zip' % version):
     unpack('lucene', tmpDir, artifact, version)
   unpack('lucene', tmpDir, 'lucene-%s-src.tgz' % version, version)

-  print
-  print 'Test Solr...'
+  print()
+  print('Test Solr...')
   checkSigs('solr', solrPath, version, tmpDir, isSigned)
   for artifact in ('apache-solr-%s.tgz' % version, 'apache-solr-%s.zip' % version):
     unpack('solr', tmpDir, artifact, version)
   unpack('solr', tmpDir, 'apache-solr-%s-src.tgz' % version, version)

-  print 'Test Maven artifacts for Lucene and Solr...'
+  print('Test Maven artifacts for Lucene and Solr...')
   checkMaven(baseURL, tmpDir, version, isSigned)

 if __name__ == '__main__':