mirror of https://github.com/apache/lucene.git
LUCENE-7935: Publish .sha512 hash files with the release artifacts
parent ab32506243
commit 9e780ba564
@@ -54,7 +54,7 @@ cd lucene
 # 1 2 3 4 5 6 7 8 9
 # ^- Dirs start here
 wget -r -np -l 0 -nH -erobots=off --cut-dirs=5 \
-    --reject="*.md5,*.sha1,maven-metadata.xml*,index.html*" "${RC_URL}/lucene/maven/"
+    --reject="*.md5,*.sha1,*.sha512,maven-metadata.xml*,index.html*" "${RC_URL}/lucene/maven/"
 
 cd ..
 
@@ -62,7 +62,7 @@ mkdir solr
 cd solr
 
 wget -r -np -l 0 -nH -erobots=off --cut-dirs=5 \
-    --reject="*.md5,*.sha1,maven-metadata.xml*,index.html*" "${RC_URL}/solr/maven/"
+    --reject="*.md5,*.sha1,*.sha512,maven-metadata.xml*,index.html*" "${RC_URL}/solr/maven/"
 
 cd ..
 
@@ -20,7 +20,7 @@
 # Prepares an RC of the Solr Ref Guide by doing local file operations to:
 #  - create a directory for the RC files
 #  - move the PDF files into the RC directory with the appropriate name
-#  - generate a SHA1 of the PDF file
+#  - generate SHA1 and SHA512 of the PDF file
 #  - GPG sign the PDF files
 #
 # See: https://cwiki.apache.org/confluence/display/solr/Internal+-+How+To+Publish+This+Documentation
@@ -36,16 +36,10 @@ if [ $# -lt 2 ] || [ 3 -lt $# ] ; then
   exit 1;
 fi
 
-sha_sum() {
-  if hash sha1sum 2>/dev/null; then
-    sha1sum "$@"
-  elif hash shasum 2>/dev/null; then
-    shasum "$@"
-  else
-    echo "Can't find sha1sum or shasum, aborting"
-    exit 1;
-  fi
-}
+if ! hash shasum 2>/dev/null ; then
+  echo "Can't find shasum, aborting"
+  exit 1;
+fi
 
 SRC_FILE=$1
 VER_RC=$2
@@ -70,6 +64,7 @@ PREFIX="apache-solr-ref-guide"
 DIR="$PREFIX-$VER_RC"
 PDF="$PREFIX-$VER.pdf"
 SHA="$PDF.sha1"
+SHA512="$PDF.sha512"
 GPG="$PDF.asc"
 
 if [ ! -e $SRC_FILE ] ; then
@@ -90,6 +85,7 @@ set -x
 mkdir $DIR || exit 1
 mv $SRC_FILE $DIR/$PDF || exit 1
 cd $DIR || exit 1
-sha_sum $PDF > $SHA || exit 1
+shasum $PDF > $SHA || exit 1
+shasum -a 512 $PDF > $SHA512 || exit 1
 gpg $GPG_ID_ARG --armor --output $GPG --detach-sig $PDF|| exit 1
 
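Once the script finishes, the generated files can be sanity-checked by hand. A minimal sketch (not part of this change), assuming the apache-solr-ref-guide-7.0 naming used in the example further down and that you are inside the RC directory:

    shasum -c apache-solr-ref-guide-7.0.pdf.sha1             # recompute SHA-1 and compare with the sidecar
    shasum -a 512 -c apache-solr-ref-guide-7.0.pdf.sha512    # same check for the new SHA-512 sidecar
    gpg --verify apache-solr-ref-guide-7.0.pdf.asc apache-solr-ref-guide-7.0.pdf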
@@ -296,7 +296,7 @@ def checkSigs(project, urlString, version, tmpDir, isSigned):
   expectedSigs = []
   if isSigned:
     expectedSigs.append('asc')
-  expectedSigs.extend(['md5', 'sha1'])
+  expectedSigs.extend(['sha1', 'sha512'])
 
   artifacts = []
   for text, subURL in ents:
@@ -547,31 +547,31 @@ def run(command, logFile):
     raise RuntimeError('command "%s" failed; see log file %s' % (command, logPath))
 
 def verifyDigests(artifact, urlString, tmpDir):
-  print('    verify md5/sha1 digests')
-  md5Expected, t = load(urlString + '.md5').strip().split()
-  if t != '*'+artifact:
-    raise RuntimeError('MD5 %s.md5 lists artifact %s but expected *%s' % (urlString, t, artifact))
-
+  print('    verify sha1/sha512 digests')
   sha1Expected, t = load(urlString + '.sha1').strip().split()
   if t != '*'+artifact:
     raise RuntimeError('SHA1 %s.sha1 lists artifact %s but expected *%s' % (urlString, t, artifact))
 
-  m = hashlib.md5()
+  sha512Expected, t = load(urlString + '.sha512').strip().split()
+  if t != '*'+artifact:
+    raise RuntimeError('SHA512 %s.sha512 lists artifact %s but expected *%s' % (urlString, t, artifact))
+
   s = hashlib.sha1()
+  s512 = hashlib.sha512()
   f = open('%s/%s' % (tmpDir, artifact), 'rb')
   while True:
     x = f.read(65536)
     if len(x) == 0:
       break
-    m.update(x)
     s.update(x)
+    s512.update(x)
   f.close()
-  md5Actual = m.hexdigest()
   sha1Actual = s.hexdigest()
-  if md5Actual != md5Expected:
-    raise RuntimeError('MD5 digest mismatch for %s: expected %s but got %s' % (artifact, md5Expected, md5Actual))
+  sha512Actual = s512.hexdigest()
   if sha1Actual != sha1Expected:
     raise RuntimeError('SHA1 digest mismatch for %s: expected %s but got %s' % (artifact, sha1Expected, sha1Actual))
+  if sha512Actual != sha512Expected:
+    raise RuntimeError('SHA512 digest mismatch for %s: expected %s but got %s' % (artifact, sha512Expected, sha512Actual))
 
 def getDirEntries(urlString):
   if urlString.startswith('file:/') and not urlString.startswith('file://'):
@@ -1071,36 +1071,36 @@ def checkIdenticalMavenArtifacts(distFiles, artifacts, version):
                        % (artifact, distFilenames[artifactFilename], project))
 
 def verifyMavenDigests(artifacts):
-  print("    verify Maven artifacts' md5/sha1 digests...")
+  print("    verify Maven artifacts' sha1/sha512 digests...")
   reJarWarPom = re.compile(r'\.(?:[wj]ar|pom)$')
   for project in ('lucene', 'solr'):
     for artifactFile in [a for a in artifacts[project] if reJarWarPom.search(a)]:
-      if artifactFile + '.md5' not in artifacts[project]:
-        raise RuntimeError('missing: MD5 digest for %s' % artifactFile)
       if artifactFile + '.sha1' not in artifacts[project]:
         raise RuntimeError('missing: SHA1 digest for %s' % artifactFile)
-      with open(artifactFile + '.md5', encoding='UTF-8') as md5File:
-        md5Expected = md5File.read().strip()
+      if artifactFile + '.sha512' not in artifacts[project]:
+        raise RuntimeError('missing: SHA512 digest for %s' % artifactFile)
       with open(artifactFile + '.sha1', encoding='UTF-8') as sha1File:
        sha1Expected = sha1File.read().strip()
-      md5 = hashlib.md5()
+      with open(artifactFile + '.sha512', encoding='UTF-8') as sha512File:
+        sha512Expected = sha512File.read().strip()
       sha1 = hashlib.sha1()
+      sha512 = hashlib.sha512()
       inputFile = open(artifactFile, 'rb')
       while True:
         bytes = inputFile.read(65536)
         if len(bytes) == 0:
           break
-        md5.update(bytes)
         sha1.update(bytes)
+        sha512.update(bytes)
       inputFile.close()
-      md5Actual = md5.hexdigest()
       sha1Actual = sha1.hexdigest()
-      if md5Actual != md5Expected:
-        raise RuntimeError('MD5 digest mismatch for %s: expected %s but got %s'
-                           % (artifactFile, md5Expected, md5Actual))
+      sha512Actual = sha512.hexdigest()
       if sha1Actual != sha1Expected:
         raise RuntimeError('SHA1 digest mismatch for %s: expected %s but got %s'
                            % (artifactFile, sha1Expected, sha1Actual))
+      if sha512Actual != sha512Expected:
+        raise RuntimeError('SHA512 digest mismatch for %s: expected %s but got %s'
+                           % (artifactFile, sha512Expected, sha512Actual))
 
 def getPOMcoordinate(treeRoot):
   namespace = '{http://maven.apache.org/POM/4.0.0}'
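The check the smoke tester performs here can also be reproduced manually for a downloaded distribution artifact, since the dist sidecars use the "<digest> *<filename>" layout that shasum's check mode understands. A sketch only; lucene-X.Y.Z.tgz is a placeholder name:

    shasum -a 1 -c lucene-X.Y.Z.tgz.sha1        # verify the artifact against its .sha1 sidecar
    shasum -a 512 -c lucene-X.Y.Z.tgz.sha512    # verify against the new .sha512 sidecar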
@@ -125,6 +125,10 @@ Other
 * LUCENE-8122, LUCENE-8175: Upgrade analysis/icu to ICU 61.1.
   (Robert Muir, Adrien Grand, Uwe Schindler)
 
+Build
+
+* LUCENE-7935: Publish .sha512 hash files with the release artifacts and stop
+  publishing .md5 hashes since the algorithm is broken (janhoy)
+
 ======================= Lucene 7.3.0 =======================
@@ -80,7 +80,7 @@
   </condition>
 
   <!-- we exclude ext/*.jar because we don't want example/lib/ext logging jars on the cp -->
-  <property name="common.classpath.excludes" value="**/*.txt,**/*.template,**/*.sha1,ext/*.jar" />
+  <property name="common.classpath.excludes" value="**/*.txt,**/*.template,**/*.sha1,**/*.sha512,ext/*.jar" />
 
   <property name="build.dir" location="build"/>
   <!-- Needed in case a module needs the original build, also for compile-tools to be called from a module -->
@@ -2299,8 +2299,8 @@ ${ant.project.name}.test.dependencies=${test.classpath.list}
     <attribute name="file"/>
     <sequential>
       <echo>Building checksums for '@{file}'</echo>
-      <checksum file="@{file}" algorithm="md5" format="MD5SUM" forceoverwrite="yes" readbuffersize="65536"/>
-      <checksum file="@{file}" algorithm="sha1" format="MD5SUM" forceoverwrite="yes" readbuffersize="65536"/>
+      <checksum file="@{file}" algorithm="sha1" fileext=".sha1" format="MD5SUM" forceoverwrite="yes" readbuffersize="65536"/>
+      <checksum file="@{file}" algorithm="SHA-512" fileext=".sha512" format="MD5SUM" forceoverwrite="yes" readbuffersize="65536"/>
     </sequential>
   </macrodef>
 
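Both <checksum> calls above write a one-line sidecar next to @{file} in the MD5SUM layout ("<digest> *<filename>"), which is what the smoke tester's '*'+artifact check expects. A rough shell equivalent for a single file, with a placeholder name and not how the build actually invokes it, would be:

    shasum -a 1 --binary lucene-X.Y.Z.tgz > lucene-X.Y.Z.tgz.sha1
    shasum -a 512 --binary lucene-X.Y.Z.tgz > lucene-X.Y.Z.tgz.sha512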
@@ -590,7 +590,7 @@
              includes="dist/*.jar
                        dist/solrj-lib/*
                        dist/test-framework/**"
-             excludes="**/*.tgz **/*.zip **/*.md5 **/*src*.jar **/*docs*.jar **/*.sha1" />
+             excludes="**/*.tgz **/*.zip **/*.md5 **/*src*.jar **/*docs*.jar **/*.sha1 **/*.sha512" />
     <tarfileset dir="${javadoc-online.dir}"
                 prefix="${fullnamever}/docs" />
   </tar>
@@ -61,7 +61,8 @@ These steps walk through checking out this directory and uploading the Guide to
 IMPORTANT: The next step requires that you have already generated your GPG keys. Your GPG passphrase will be required.
 
 [start=3]
-. Run the Prep Ref Guide script to prepare the RC. This script ensures proper naming of the PDF file, generates `.sha1` and `.asc` files and creates the proper RC sub-directories under `solr-ref-guide-rc`.
+. Run the Prep Ref Guide script to prepare the RC. This script ensures proper naming of the PDF file, generates `.sha1`,
+`.sha512` and `.asc` files and creates the proper RC sub-directories under `solr-ref-guide-rc`.
 .. The structure of the input is: `prep-solr-ref-guide-rc.sh <path/PDFfilename> <Solrversion-RC#> GPGkey`.
 .. From the `solr-ref-guide-rc` directory, it will look something like this:
 +
@@ -72,7 +73,8 @@ $ ~/lucene-source/dev-tools/scripts/prep-solr-ref-guide-rc.sh apache-solr-ref-gu
 + mkdir apache-solr-ref-guide-7.0-RC0
 + mv apache-solr-ref-guide-7.0.pdf apache-solr-ref-guide-7.0-RC0/apache-solr-ref-guide-7.0.pdf
 + cd apache-solr-ref-guide-7.0-RC0
-+ sha1sum apache-solr-ref-guide-7.0.pdf
++ shasum apache-solr-ref-guide-7.0.pdf
++ shasum -a 512 apache-solr-ref-guide-7.0.pdf
 + gpg -u DEADBEEF --armor --output apache-solr-ref-guide-7.0.pdf.asc --detach-sig apache-solr-ref-guide-7.0.pdf
 
 You need a passphrase to unlock the secret key for
@@ -107,7 +109,7 @@ Once at least three PMC members have voted for release (see https://www.apache.o
 $ ~/lucene-source/dev-tools/scripts/publish-solr-ref-guide-rc.sh X.Y-RCZ
 
 ## Run the following commands when ready...
-svn move -m 'publishing apache-solr-ref-guide-X.Y-RCZ' https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf.asc https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf.sha1 https://dist.apache.org/repos/dist/release/lucene/solr/ref-guide/
+svn move -m 'publishing apache-solr-ref-guide-X.Y-RCZ' https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf.asc https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf.sha1 https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ/apache-solr-ref-guide-X.Y.pdf.sha512 https://dist.apache.org/repos/dist/release/lucene/solr/ref-guide/
 
 svn rm -m 'cleaning up apache-solr-ref-guide-X.Y-RCZ' https://dist.apache.org/repos/dist/dev/lucene/solr/ref-guide/apache-solr-ref-guide-X.Y-RCZ
 ----