Adds task to upload cpp distribution zip

Original commit: elastic/x-pack-elasticsearch@c4fee26b37
This commit is contained in:
Colin Goodheart-Smithe 2016-12-01 13:40:11 +00:00
parent 6fee7a21b5
commit bfb72d0a96
5 changed files with 214 additions and 1 deletions

View File

@ -9,6 +9,9 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption
project.ext.awsAccessKey = AWS_ACCESS_KEY
project.ext.awsSecretKey = AWS_SECRET_KEY
boolean isWindows = OperatingSystem.current().isWindows()
boolean isLinux = OperatingSystem.current().isLinux()
boolean isMacOsX = OperatingSystem.current().isMacOsX()

12
buildSrc/build.gradle Normal file
View File

@ -0,0 +1,12 @@
// Build script for the buildSrc helper classes (the S3 upload/download tasks);
// Gradle compiles buildSrc automatically before running the main build.
apply plugin: 'java'
apply plugin: 'idea'
// dependencies are resolved from Maven Central
repositories {
mavenCentral()
}
dependencies {
compile gradleApi()
compile localGroovy()
// AWS SDK used by the Upload/Download S3 task classes
compile 'com.amazonaws:aws-java-sdk-s3:1.10.33'
}

View File

@ -0,0 +1,86 @@
package org.elastic.gradle
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.services.s3.AmazonS3Client
import com.amazonaws.services.s3.model.GetObjectRequest
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskAction
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import javax.inject.Inject
/**
 * A task to download files from s3, which allows delayed resolution of the s3 path
 */
class DownloadS3Task extends DefaultTask {

    // Keys of the s3 objects to fetch; each entry is evaluated lazily
    // (toString() at execution time) so the s3 path can be resolved late.
    private List<Object> toDownload = new ArrayList<>()

    @Input
    String bucket

    /** Directory the downloaded files are written into. */
    @Input
    File destDir

    /** True if the file paths should be flattened into a single directory when downloaded, false otherwise */
    @Input
    boolean flatten = false

    DownloadS3Task() {
        // marks this task as needing AWS credentials (consumed by the build scripts)
        ext.set('needs.aws', true)
    }

    @Inject
    public ProgressLoggerFactory getProgressLoggerFactory() {
        // replaced by Gradle's dependency injection at runtime
        throw new UnsupportedOperationException()
    }

    /**
     * Add an s3 key to be downloaded. The key object will be evaluated at runtime.
     */
    public void download(Object key) {
        toDownload.add(key)
    }

    @TaskAction
    public void downloadFromS3() {
        AWSCredentials creds = new BasicAWSCredentials(project.awsAccessKey, project.awsSecretKey)
        ClientConfiguration clientConfiguration = new ClientConfiguration();
        // the response metadata cache is only there for diagnostics purposes,
        // but can force objects from every response to the old generation.
        clientConfiguration.setResponseMetadataCacheSize(0);
        AmazonS3Client client = new AmazonS3Client(creds, clientConfiguration);
        // was "s3upload" — a copy/paste from UploadS3Task; this operation downloads
        ProgressLogger progressLogger = getProgressLoggerFactory().newOperation("s3download")
        progressLogger.description = "download files from s3"
        progressLogger.started()
        for (Object entry : toDownload) {
            String key = entry.toString()
            downloadFile(client, progressLogger, destDir, key)
        }
        progressLogger.completed()
    }

    /** Download a single s3 object into {@code targetDir}. */
    private void downloadFile(AmazonS3Client client, ProgressLogger progressLogger, File targetDir, String key) {
        // When flattening, keep only the text after the last '/'; lastIndexOf
        // returns -1 for a bare key, so the whole key is kept in that case.
        String destPath
        if (flatten) {
            destPath = key.substring(key.lastIndexOf('/') + 1)
        } else {
            destPath = key
        }
        logger.info("Downloading ${destPath} from ${bucket}")
        progressLogger.progress("downloading ${destPath}")
        client.getObject(new GetObjectRequest(bucket, key),
                new File(targetDir, destPath))
    }
}

View File

@ -0,0 +1,102 @@
package org.elastic.gradle
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.services.s3.AmazonS3Client
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskAction
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import javax.inject.Inject
/**
 * A task to upload files to s3, which allows delayed resolution of the s3 path
 */
class UploadS3Task extends DefaultTask {

    // Files queued for upload, keyed by local file; values are evaluated
    // lazily (toString() at execution time) to allow delayed s3 paths.
    // LinkedHashMap keeps uploads in registration order.
    private Map<File, Object> toUpload = new LinkedHashMap<>()

    @Input
    String bucket

    /** True if a sha1 hash of each file should exist and be uploaded. This is ignored for uploading directories. */
    @Input
    boolean addSha1Hash = false

    /** True if a signature of each file should exist and be uploaded. This is ignored for uploading directories. */
    @Input
    boolean addSignature = false

    UploadS3Task() {
        // marks this task as needing AWS credentials (consumed by the build scripts)
        ext.set('needs.aws', true)
    }

    @Inject
    public ProgressLoggerFactory getProgressLoggerFactory() {
        // replaced by Gradle's dependency injection at runtime
        throw new UnsupportedOperationException()
    }

    /**
     * Add a file to be uploaded to s3. The key object will be evaluated at runtime.
     *
     * If file is a directory, all files in the directory will be uploaded to the key as a prefix.
     */
    public void upload(File file, Object key) {
        toUpload.put(file, key)
    }

    @TaskAction
    public void uploadToS3() {
        AWSCredentials credentials = new BasicAWSCredentials(project.awsAccessKey, project.awsSecretKey)
        ClientConfiguration config = new ClientConfiguration();
        // the response metadata cache is only there for diagnostics purposes,
        // but can force objects from every response to the old generation.
        config.setResponseMetadataCacheSize(0);
        AmazonS3Client s3Client = new AmazonS3Client(credentials, config);
        ProgressLogger progress = getProgressLoggerFactory().newOperation("s3upload")
        progress.description = "upload files to s3"
        progress.started()
        toUpload.each { File file, Object rawKey ->
            String key = rawKey.toString()
            if (file.isDirectory()) {
                uploadDir(s3Client, progress, file, key)
                return
            }
            uploadFile(s3Client, progress, file, key)
            // sibling .sha1 / .asc files are expected to already exist on disk
            if (addSha1Hash) {
                uploadFile(s3Client, progress, new File(file.path + '.sha1'), key + '.sha1')
            }
            if (addSignature) {
                uploadFile(s3Client, progress, new File(file.path + '.asc'), key + '.asc')
            }
        }
        progress.completed()
    }

    /** Recursively upload all files in a directory. */
    private void uploadDir(AmazonS3Client client, ProgressLogger progressLogger, File dir, String prefix) {
        dir.listFiles().each { File child ->
            String childKey = "${prefix}/${child.name}"
            if (child.isDirectory()) {
                uploadDir(client, progressLogger, child, childKey)
            } else {
                uploadFile(client, progressLogger, child, childKey)
            }
        }
    }

    /** Upload a single file */
    private void uploadFile(AmazonS3Client client, ProgressLogger progressLogger, File file, String key) {
        logger.info("Uploading ${file.name} to ${key}")
        progressLogger.progress("uploading ${file.name}")
        client.putObject(bucket, key, file)
    }
}

View File

@ -1,4 +1,5 @@
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.elastic.gradle.DownloadS3Task
apply plugin: 'elasticsearch.esplugin'
@ -39,6 +40,15 @@ integTest {
integTest.mustRunAfter noBootstrapTest
// Fetches the prebuilt C++ distribution zip from the S3 bucket; only
// enabled when the local C++ build is turned off (cppEnabled == false).
task downloadCppDist(type: DownloadS3Task) {
enabled project.cppEnabled == false
description = 'download cpp zips from S3 Bucket'
bucket 'prelert-artifacts'
destDir file("${buildDir}/cppDist")
// flatten: drop the s3 key prefix so the zip lands directly in build/cppDist
flatten true
download "maven/${project.group}/prelert_cpp_darwin-x86_64/${elasticsearchVersion}/prelert_cpp_darwin-x86_64.zip"
}
bundlePlugin {
if (project.cppEnabled) {
from { zipTree(project(':cpp').buildZip.outputs.files.singleFile) }
@ -54,4 +64,4 @@ bundlePlugin {
}
}
bundlePlugin.dependsOn(':cpp:buildZip') bundlePlugin.dependsOn([':cpp:buildZip', 'downloadCppDist'])