Smoke tester: Adjust to latest changes (#20611)

* Removed the check for the correct hash in the `/` response, since otherwise two hashes would be needed
* Adapted to new URLs (see the URL sketch below the commit metadata)
* Added two missing plugins
* Downloaded gpg key only once
Alexander Reelsen 2016-09-23 11:28:46 +02:00 committed by GitHub
parent fe1803c957
commit 56cc87b7cc
1 changed file with 17 additions and 17 deletions
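
For reference, a minimal sketch of the URL change described in the commit message; the version and hash values below are placeholders, not part of the commit:

# illustrative only: placeholder version and unified-release hash
version = '5.0.0'
hash = 'deadbeef'

# old staging layout (Maven-style path under download.elasticsearch.org)
old_url = '%s/%s-%s' % ('http://download.elasticsearch.org/elasticsearch/staging', version, hash)

# new staging layout (flat file names under staging.elastic.co)
new_url = 'https://staging.elastic.co/%s-%s/downloads/elasticsearch' % (version, hash)

print(old_url)  # http://download.elasticsearch.org/elasticsearch/staging/5.0.0-deadbeef
print(new_url)  # https://staging.elastic.co/5.0.0-deadbeef/downloads/elasticsearch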

@@ -69,8 +69,10 @@ DEFAULT_PLUGINS = ["analysis-icu",
"discovery-gce",
"ingest-attachment",
"ingest-geoip",
"ingest-user-agent",
"lang-javascript",
"lang-python",
"mapper-attachments",
"mapper-murmur3",
"mapper-size",
"repository-azure",
@@ -131,6 +133,13 @@ def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS)
try:
downloaded_files = []
print(' ' + '*' * 80)
# create a temporary gpg home and download the release key into it as the only key;
# signature verification will then fail if the signing key is not in that keyring, and
# this keeps the executing host unmodified since we never import the key into the default keyring
gpg_home_dir = os.path.join(tmp_dir, "gpg_home_dir")
os.makedirs(gpg_home_dir, 0o700)
run('gpg --homedir %s --keyserver pool.sks-keyservers.net --recv-key D88E42B4' % gpg_home_dir)
for file in files:
name = os.path.basename(file)
print(' Smoketest file: %s' % name)
@@ -139,7 +148,6 @@ def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS)
artifact_path = os.path.join(tmp_dir, file)
downloaded_files.append(artifact_path)
current_artifact_dir = os.path.dirname(artifact_path)
os.makedirs(current_artifact_dir)
urllib.request.urlretrieve(url, os.path.join(tmp_dir, file))
sha1_url = ''.join([url, '.sha1'])
checksum_file = artifact_path + ".sha1"
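
The checksum handling that follows is not shown in full in this hunk; as a generic sketch of what such a .sha1 comparison typically looks like (the helper name below is hypothetical, not the script's actual code):

# sketch: compare a downloaded artifact against its published .sha1 file
import hashlib
import urllib.request

def verify_sha1(artifact_path, sha1_url):
    expected = urllib.request.urlopen(sha1_url).read().decode('utf-8').split()[0]
    digest = hashlib.sha1()
    with open(artifact_path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            digest.update(chunk)
    if digest.hexdigest() != expected:
        raise RuntimeError('SHA1 mismatch: expected %s but got %s' % (expected, digest.hexdigest()))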
@@ -155,12 +163,6 @@ def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS)
print(' Downloading %s' % (gpg_url))
urllib.request.urlretrieve(gpg_url, gpg_file)
print(' Verifying gpg signature %s' % (gpg_file))
# create a temporary gpg home and download the release key into it as the only key;
# signature verification will then fail if the signing key is not in that keyring, and
# this keeps the executing host unmodified since we never import the key into the default keyring
gpg_home_dir = os.path.join(current_artifact_dir, "gpg_home_dir")
os.makedirs(gpg_home_dir, 0o700)
run('gpg --homedir %s --keyserver pool.sks-keyservers.net --recv-key D88E42B4' % gpg_home_dir)
run('cd %s && gpg --homedir %s --verify %s' % (current_artifact_dir, gpg_home_dir, os.path.basename(gpg_file)))
print(' ' + '*' * 80)
print()
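
As a standalone illustration of the temporary GPG home used above, with the script's run() helper replaced by subprocess and a hypothetical artifact name (assuming the detached signature sits next to the artifact as <name>.asc):

# sketch: verify a detached signature against a throwaway keyring so the
# host's default keyring never has to be modified
import os
import subprocess
import tempfile

tmp_dir = tempfile.mkdtemp()
gpg_home_dir = os.path.join(tmp_dir, 'gpg_home_dir')
os.makedirs(gpg_home_dir, 0o700)

# fetch the release signing key once, into the temporary home only
subprocess.check_call(['gpg', '--homedir', gpg_home_dir,
                       '--keyserver', 'pool.sks-keyservers.net',
                       '--recv-key', 'D88E42B4'])

# verification fails unless the signature was made by a key in that keyring
artifact = 'elasticsearch-5.0.0.tar.gz'  # hypothetical file name
subprocess.check_call(['gpg', '--homedir', gpg_home_dir,
                       '--verify', artifact + '.asc', artifact])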
@@ -172,7 +174,7 @@ def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS)
def get_host_from_ports_file(es_dir):
return read_fully(os.path.join(es_dir, 'logs/http.ports')).splitlines()[0]
def smoke_test_release(release, files, expected_hash, plugins):
def smoke_test_release(release, files, hash, plugins):
for release_file in files:
if not os.path.isfile(release_file):
raise RuntimeError('Smoketest failed missing file %s' % (release_file))
@@ -191,7 +193,7 @@ def smoke_test_release(release, files, expected_hash, plugins):
plugin_names = {}
for plugin in plugins:
print(' Install plugin [%s]' % (plugin))
run('%s; export ES_JAVA_OPTS="-Des.plugins.staging=%s"; %s %s %s' % (java_exe(), expected_hash, es_plugin_path, 'install -b', plugin))
run('%s; export ES_JAVA_OPTS="-Des.plugins.staging=%s"; %s %s %s' % (java_exe(), hash, es_plugin_path, 'install -b', plugin))
plugin_names[plugin] = True
if 'x-pack' in plugin_names:
headers = { 'Authorization' : 'Basic %s' % base64.b64encode(b"es_admin:foobar").decode("UTF-8") }
@@ -222,8 +224,6 @@ def smoke_test_release(release, files, expected_hash, plugins):
raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
if version['build_snapshot']:
raise RuntimeError('Expected non snapshot version')
if expected_hash != version['build_hash'].strip():
raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash']))
print(' Verify if plugins are listed in _nodes')
conn.request('GET', '/_nodes?plugin=true&pretty=true', headers=headers)
res = conn.getresponse()
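
With the build-hash comparison removed, the check against the `/` endpoint reduces to the version number and the snapshot flag; roughly (host, port, and the release value below are illustrative):

# sketch of the remaining response checks; there is no build_hash comparison any more,
# since --hash now names the unified release rather than an elasticsearch commit
import json
import http.client

release = '5.0.0'  # illustrative
conn = http.client.HTTPConnection('localhost', 9200, timeout=20)
conn.request('GET', '/')
version = json.loads(conn.getresponse().read().decode('utf-8'))['version']
if version['number'] != release:
    raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
if version['build_snapshot']:
    raise RuntimeError('Expected non snapshot version')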
@@ -262,7 +262,7 @@ if __name__ == "__main__":
parser.add_argument('--version', '-v', dest='version', default=None,
help='The Elasticsearch Version to smoke-tests', required=True)
parser.add_argument('--hash', '-s', dest='hash', default=None, required=True,
help='The sha1 short hash of the git commit to smoketest')
help='The hash of the unified release')
parser.add_argument('--plugins', '-p', dest='plugins', default=[], required=False, type=parse_list,
help='A list of additional plugins to smoketest')
parser.add_argument('--fetch_url', '-u', dest='url', default=None,
@@ -277,16 +277,16 @@ if __name__ == "__main__":
hash = args.hash
url = args.url
files = [ x % {'version': version} for x in [
'org/elasticsearch/distribution/tar/elasticsearch/%(version)s/elasticsearch-%(version)s.tar.gz',
'org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip',
'org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb',
'org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm'
'elasticsearch-%(version)s.tar.gz',
'elasticsearch-%(version)s.zip',
'elasticsearch-%(version)s.deb',
'elasticsearch-%(version)s.rpm'
]]
verify_java_version('1.8')
if url:
download_url = url
else:
download_url = '%s/%s-%s' % ('http://download.elasticsearch.org/elasticsearch/staging', version, hash)
download_url = 'https://staging.elastic.co/%s-%s/downloads/elasticsearch' % (version, hash)
download_and_verify(version, hash, files, download_url, plugins=DEFAULT_PLUGINS + plugins)