Removes upload pack task from build (elastic/elasticsearch#757)

* removes upload pack task from build

This is preventing us from being an elasticsearch-extra project, and we cannot have this task when we move to X-Pack. Once we are in X-Pack, the unified build will upload the final artifact; for now we will change the CI build to add a build step that uploads the pack artifact.

* Removes OS specific stuff from the build

The CPP_LOCAL_DISTS setting will now look for any `ml-cpp` artifacts for the same version in the specified directory.

* review corrections

Original commit: elastic/x-pack-elasticsearch@be15e55ddb
This commit is contained in:
Colin Goodheart-Smithe 2017-01-19 16:14:08 +00:00 committed by GitHub
parent 55dd438557
commit 3fa87c0994
4 changed files with 17 additions and 159 deletions

View File

@ -1,23 +1,18 @@
description = 'Builds the Machine Learning Java classes and UI'
import org.gradle.internal.os.OperatingSystem
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.elasticsearch.gradle.precommit.LicenseHeadersTask
import org.elasticsearch.gradle.VersionProperties
import org.elastic.gradle.UploadS3Task
import org.apache.tools.ant.taskdefs.condition.Os
String envMlAwsAccessKey = System.env.PRELERT_AWS_ACCESS_KEY_ID
if (envMlAwsAccessKey != null) {
project.ext.mlAwsAccessKey = envMlAwsAccessKey
} else if (project.hasProperty("PRELERT_AWS_ACCESS_KEY_ID")) {
project.ext.mlAwsAccessKey = PRELERT_AWS_ACCESS_KEY_ID
String mlAwsAccessKey = System.env.PRELERT_AWS_ACCESS_KEY_ID
if (mlAwsAccessKey == null && project.hasProperty("PRELERT_AWS_ACCESS_KEY_ID")) {
mlAwsAccessKey = PRELERT_AWS_ACCESS_KEY_ID
}
String envMlAwsSecretKey = System.env.PRELERT_AWS_SECRET_ACCESS_KEY
if (envMlAwsSecretKey != null) {
project.ext.mlAwsSecretKey = envMlAwsSecretKey
} else if (project.hasProperty("PRELERT_AWS_SECRET_ACCESS_KEY")) {
project.ext.mlAwsSecretKey = PRELERT_AWS_SECRET_ACCESS_KEY
String mlAwsSecretKey = System.env.PRELERT_AWS_SECRET_ACCESS_KEY
if (mlAwsSecretKey == null && project.hasProperty("PRELERT_AWS_SECRET_ACCESS_KEY")) {
mlAwsSecretKey = PRELERT_AWS_SECRET_ACCESS_KEY
}
String envCppLocalDists = System.env.CPP_LOCAL_DISTS
@ -29,23 +24,11 @@ if (envCppLocalDists != null) {
project.ext.cppLocalDists = ''
}
project.ext.isWindows = OperatingSystem.current().isWindows()
project.ext.isLinux = OperatingSystem.current().isLinux()
project.ext.isMacOsX = OperatingSystem.current().isMacOsX()
String uploadEnabledStr = properties.get('upload', 'false')
if (['true', 'false'].contains(uploadEnabledStr) == false) {
throw new GradleException("upload must be true or false, got ${uploadEnabledStr}")
}
project.ext.uploadEnabled = uploadEnabledStr == 'true'
allprojects {
group = 'org.elasticsearch.ml'
version = VersionProperties.elasticsearch
}
String packArtifactName = 'ml'
configurations.all {
// check for updates every build
resolutionStrategy.cacheChangingModulesFor 0, 'seconds'
@ -86,7 +69,7 @@ task bundlePack(type: Zip) {
from { zipTree(project('elasticsearch').bundlePlugin.outputs.files.singleFile) }
from { zipTree(project('kibana').bundlePlugin.outputs.files.singleFile) }
destinationDir file('build/distributions')
baseName = packArtifactName
baseName = 'ml'
version = project.version
}
@ -111,19 +94,6 @@ task clean(type: Delete) {
delete 'build'
}
String projectGroupPath = project.group.replaceAll("\\.", "/")
task uploadPackToS3(type: UploadS3Task, dependsOn: build) {
enabled project.uploadEnabled
description = 'upload pack zip to S3 Bucket'
bucket 'prelert-artifacts'
upload bundlePack.outputs.files.singleFile, "maven/${projectGroupPath}/${packArtifactName}/${project.version}/${bundlePack.outputs.files.singleFile.name}"
}
task deploy(dependsOn: uploadPackToS3) {
}
subprojects {
apply plugin: 'eclipse'
apply plugin: 'idea'
@ -153,8 +123,8 @@ subprojects {
maven {
url "s3://prelert-artifacts/maven"
credentials(AwsCredentials) {
accessKey "${project.mlAwsAccessKey}"
secretKey "${project.mlAwsSecretKey}"
accessKey "${mlAwsAccessKey}"
secretKey "${mlAwsSecretKey}"
}
}
mavenCentral()
@ -229,7 +199,7 @@ allprojects {
// Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
if (path != ':') {
eclipse.project.name = path
if (isWindows) {
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
eclipse.project.name = eclipse.project.name.replace(':', '_')
}
}

View File

@ -1,12 +0,0 @@
// buildSrc build script: compiles the custom Gradle task classes
// (e.g. org.elastic.gradle.UploadS3Task) used by the main build.
apply plugin: 'java'
apply plugin: 'idea'
repositories {
// Resolve buildSrc dependencies from Maven Central.
mavenCentral()
}
dependencies {
// gradleApi()/localGroovy() expose the Gradle and Groovy APIs needed to
// implement custom task types in buildSrc.
compile gradleApi()
compile localGroovy()
// AWS S3 client used by UploadS3Task to upload build artifacts.
compile 'com.amazonaws:aws-java-sdk-s3:1.10.33'
}

View File

@ -1,102 +0,0 @@
package org.elastic.gradle
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.services.s3.AmazonS3Client
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskAction
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import javax.inject.Inject
/**
 * A task to upload files to S3 which allows delayed resolution of the S3 key:
 * the key object passed to {@link #upload} is only converted to a String when
 * the task executes, so it may depend on values computed later in the build.
 */
class UploadS3Task extends DefaultTask {
// Files queued for upload, mapped to their (lazily evaluated) S3 key.
// LinkedHashMap preserves the order in which uploads were registered.
private Map<File, Object> toUpload = new LinkedHashMap<>()
// Destination S3 bucket name; must be set by the configuring build script.
@Input
String bucket
/** True if a sha1 hash of each file should exist and be uploaded. This is ignored for uploading directories. */
@Input
boolean addSha1Hash = false
/** True if a signature of each file should exist and be uploaded. This is ignored for uploading directories. */
@Input
boolean addSignature = false
UploadS3Task() {
// Flags this task as needing AWS credentials; presumably consumed by the
// build scripts that configure AWS access — TODO confirm against callers.
ext.set('needs.aws', true)
}
// Injected by Gradle's service registry at runtime; this body never executes.
@Inject
public ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException()
}
/**
 * Add a file to be uploaded to s3. The key object will be evaluated at runtime.
 *
 * If file is a directory, all files in the directory will be uploaded to the key as a prefix.
 */
public void upload(File file, Object key) {
toUpload.put(file, key)
}
// Task action: uploads every registered file (or directory tree) to the bucket.
@TaskAction
public void uploadToS3() {
// Credentials come from the project's mlAwsAccessKey/mlAwsSecretKey properties,
// which the root build script populates from env vars or Gradle properties.
AWSCredentials creds = new BasicAWSCredentials(project.mlAwsAccessKey, project.mlAwsSecretKey)
ClientConfiguration clientConfiguration = new ClientConfiguration();
// the response metadata cache is only there for diagnostics purposes,
// but can force objects from every response to the old generation.
clientConfiguration.setResponseMetadataCacheSize(0);
AmazonS3Client client = new AmazonS3Client(creds, clientConfiguration);
ProgressLogger progressLogger = getProgressLoggerFactory().newOperation("s3upload")
progressLogger.description = "upload files to s3"
progressLogger.started()
for (Map.Entry<File, Object> entry : toUpload) {
File file = entry.getKey()
// Keys are resolved to Strings only now, at execution time (delayed resolution).
String key = entry.getValue().toString()
if (file.isDirectory()) {
uploadDir(client, progressLogger, file, key)
} else {
uploadFile(client, progressLogger, file, key)
// The companion .sha1/.asc files are assumed to already exist next to the
// artifact — NOTE(review): presumably produced by an earlier task; confirm.
if (addSha1Hash) {
uploadFile(client, progressLogger, new File(file.path + '.sha1'), key + '.sha1')
}
if (addSignature) {
uploadFile(client, progressLogger, new File(file.path + '.asc'), key + '.asc')
}
}
}
progressLogger.completed()
}
/** Recursively upload all files in a directory, using the key as a path prefix. */
private void uploadDir(AmazonS3Client client, ProgressLogger progressLogger, File dir, String prefix) {
for (File subfile : dir.listFiles()) {
if (subfile.isDirectory()) {
uploadDir(client, progressLogger, subfile, "${prefix}/${subfile.name}")
} else {
String subkey = "${prefix}/${subfile.name}"
uploadFile(client, progressLogger, subfile, subkey)
}
}
}
/** Upload a single file to the configured bucket under the given key. */
private void uploadFile(AmazonS3Client client, ProgressLogger progressLogger, File file, String key) {
logger.info("Uploading ${file.name} to ${key}")
progressLogger.progress("uploading ${file.name}")
client.putObject(bucket, key, file)
}
}

View File

@ -48,10 +48,12 @@ integTest.mustRunAfter noBootstrapTest
bundlePlugin {
if (project.cppLocalDists) {
String localZipFile = 'ml-cpp-${project.version}-' +
(project.isWindows ? "windows-x86_64" : (project.isMacOsX ? "darwin-x86_64" :
(project.isLinux ? "linux-x86_64" : "sunos-x86_64"))) + ".zip"
from { zipTree(cppLocalDists + '/' + localZipFile) }
def localCppBundles = fileTree(dir: cppLocalDists).matching { include "ml-cpp-${project.version}-*.zip" }
for (outputFile in localCppBundles) {
from(zipTree(outputFile)) {
duplicatesStrategy 'exclude'
}
}
} else {
for (outputFile in configurations.nativeBundle) {
from(zipTree(outputFile)) {