OpenSearch/dev-tools/upload-s3.py
Simon Willnauer 35f33c4bbc Added tool to build a complete Elasticsearch release.
This tool builds a release and runs several checks to make sure the
release is in reasonable shape (a smoke test). From a top-level
perspective it runs the following steps:

 * clean the build environment with `mvn clean`
 * check that a Java 6 JDK is available
 * run the tests with both network and local modes
 * generate the checksums for the binary packages (see the sketch after this list)
 * upload the binary packages to Amazon S3
 * run `mvn deploy` to publish the Maven artifacts
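
The checksum step is plain standard-library Python. A minimal sketch,
assuming SHA-1 checksums; the helper name and chunk size are illustrative
only, not code from this commit:

    import hashlib

    def sha1_checksum(path):
        # Stream the package in chunks so large release artifacts
        # never have to be held in memory at once.
        sha = hashlib.sha1()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(8192), b''):
                sha.update(chunk)
        return sha.hexdigest()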

The script creates an intermediate branch from a given 'release branch'
and updates all versions based on the version currently being released,
touching both the 'pom.xml' file and the 'Version.java' class. Once this
is done it commits the changes, rebases against the branch we want to
release from, merges the changes from the intermediate branch back, and
pushes to the given remote repository, including the release tag. In git
terms the flow corresponds roughly to the sketch below.
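
A hypothetical outline of that flow, with placeholder branch, version,
and remote names (a sketch of the described steps, not the actual
release script):

    import subprocess

    def git(*args):
        # run a git command, raising if it exits non-zero
        subprocess.check_call(['git'] + list(args))

    # create the intermediate branch from the branch we release from
    git('checkout', '-b', 'release_X.Y.Z', 'release-branch')
    # ... update versions in pom.xml and Version.java, then commit ...
    git('commit', '-am', 'bump version to X.Y.Z')
    git('rebase', 'release-branch')
    # merge the intermediate branch back and push, including the tag
    git('checkout', 'release-branch')
    git('merge', 'release_X.Y.Z')
    git('tag', 'vX.Y.Z')
    git('push', 'origin', 'release-branch', 'vX.Y.Z')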
2013-11-04 14:08:59 +01:00

# Licensed to ElasticSearch and Shay Banon under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import argparse
try:
    import boto.s3
except ImportError:
    raise RuntimeError("""
S3 upload requires boto to be installed
Use one of:
'pip install -U boto'
'apt-get install python-boto'
'easy_install boto'
""")

def list_buckets(conn):
    # convenience helper: returns all buckets visible to the connection
    return conn.get_all_buckets()

def upload_s3(conn, path, key, file, bucket):
    print 'Uploading %s to Amazon S3 bucket %s/%s' % \
          (file, bucket, os.path.join(path, key))

    def percent_cb(complete, total):
        # progress callback: print a dot so long uploads show activity
        sys.stdout.write('.')
        sys.stdout.flush()

    bucket = conn.create_bucket(bucket)  # returns the bucket, creating it if missing
    k = bucket.new_key(os.path.join(path, key))
    k.set_contents_from_filename(file, cb=percent_cb, num_cb=100)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Uploads files to Amazon S3')
    parser.add_argument('--file', '-f', metavar='path to file',
                        help='the file to upload', required=True)
    parser.add_argument('--bucket', '-b', metavar='bucket', default='download.elasticsearch.org',
                        help='The S3 bucket to upload to')
    parser.add_argument('--path', '-p', metavar='elasticsearch/elasticsearch', default='elasticsearch/elasticsearch',
                        help='The key path to use')
    parser.add_argument('--key', '-k', metavar='key', default=None,
                        help='The key - defaults to the file name')
    args = parser.parse_args()
    # default the key to the file's basename if none was given
    if args.key:
        key = args.key
    else:
        key = os.path.basename(args.file)
    connection = boto.connect_s3()
    upload_s3(connection, args.path, key, args.file, args.bucket)
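
# Usage sketch (hypothetical file path and version; not part of the
# original commit). boto.connect_s3() reads credentials from the
# AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables or a
# ~/.boto config file. From the command line:
#
#   python upload-s3.py --file /tmp/elasticsearch-X.Y.Z.tar.gz
#
# or programmatically, with the same defaults:
#
#   conn = boto.connect_s3()
#   upload_s3(conn, 'elasticsearch/elasticsearch',
#             'elasticsearch-X.Y.Z.tar.gz',
#             '/tmp/elasticsearch-X.Y.Z.tar.gz',
#             'download.elasticsearch.org')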