/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.EmptyDirTask
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar
import groovy.io.FileType
import java.nio.file.Files
import java.nio.file.Path

// need this so Zip/Tar tasks get basic defaults...
apply plugin: 'base'
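// ('base' contributes the clean/assemble/check lifecycle tasks and the default
// archive naming/destination conventions that the Zip and Tar tasks below rely on)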
// CopySpec does not make it easy to create an empty directory so we
// create the directory that we want, and then point CopySpec to its
// parent to copy to the root of the distribution
ext.logsDir = new File(buildDir, 'logs-hack/logs')
tasks.register('createLogsDir', EmptyDirTask) {
  dir = "${logsDir}"
  dirMode = 0755
}
ext.pluginsDir = new File(buildDir, 'plugins-hack/plugins')
tasks.register('createPluginsDir', EmptyDirTask) {
  dir = "${pluginsDir}"
  dirMode = 0755
}
ext.jvmOptionsDir = new File(buildDir, 'jvm-options-hack/jvm.options.d')
tasks.register('createJvmOptionsDir', EmptyDirTask) {
  dir = "${jvmOptionsDir}"
  dirMode = 0750
}
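// The pay-off for the hack above is in archiveFiles() below: copying from
// logsDir.getParent() (i.e. build/logs-hack, whose only content is the empty
// logs/ directory) into the archive root yields an empty logs/ entry in the
// distribution; pluginsDir and jvmOptionsDir are consumed the same way
// (the latter ends up under config/).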
CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean oss, boolean jdk) {
  return copySpec {
    into("elasticsearch-${version}") {
      into('lib') {
        with libFiles(oss)
      }
      into('config') {
        dirMode 0750
        fileMode 0660
        with configFiles(distributionType, oss, jdk)
        from {
          dirMode 0750
          jvmOptionsDir.getParent()
        }
      }
      into('bin') {
        with binFiles(distributionType, oss, jdk)
      }
      if (jdk) {
        into("darwin".equals(platform) ? 'jdk.app' : 'jdk') {
          with jdkFiles(project, platform, architecture)
        }
      }
      into('') {
        from {
          dirMode 0755
          logsDir.getParent()
        }
      }
      into('') {
        from {
          dirMode 0755
          pluginsDir.getParent()
        }
      }
      from(rootProject.projectDir) {
        include 'README.asciidoc'
      }
      from(rootProject.file('licenses')) {
        include oss ? 'APACHE-LICENSE-2.0.txt' : 'ELASTIC-LICENSE.txt'
        rename { 'LICENSE.txt' }
      }

      with noticeFile(oss, jdk)
      into('modules') {
        with modulesFiles
      }
    }
  }
}
// common config across all zip/tar
tasks.withType(AbstractArchiveTask).configureEach {
  dependsOn createLogsDir, createPluginsDir, createJvmOptionsDir
  String subdir = it.name.substring('build'.size()).replaceAll(/[A-Z]/) { '-' + it.toLowerCase() }.substring(1)
  destinationDirectory = file("${subdir}/build/distributions")
  archiveBaseName = "elasticsearch${subdir.contains('oss') ? '-oss' : ''}"
}
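// For example, a task named 'buildOssLinuxTar' yields subdir 'oss-linux-tar',
// so its archive is written to oss-linux-tar/build/distributions (the matching
// subproject's build directory) with archiveBaseName 'elasticsearch-oss'.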
Closure commonZipConfig = {
  dirMode 0755
  fileMode 0644
}

tasks.register('buildIntegTestZip', Zip) {
  configure(commonZipConfig)
  with archiveFiles(transportModulesFiles, 'zip', null, 'x64', true, false)
}

tasks.register('buildWindowsZip', Zip) {
  configure(commonZipConfig)
  archiveClassifier = 'windows-x86_64'
  with archiveFiles(modulesFiles(false, 'windows-x86_64'), 'zip', 'windows', 'x64', false, true)
}

tasks.register('buildOssWindowsZip', Zip) {
  configure(commonZipConfig)
  archiveClassifier = 'windows-x86_64'
  with archiveFiles(modulesFiles(true, 'windows-x86_64'), 'zip', 'windows', 'x64', true, true)
}

tasks.register('buildNoJdkWindowsZip', Zip) {
  configure(commonZipConfig)
  archiveClassifier = 'no-jdk-windows-x86_64'
  with archiveFiles(modulesFiles(false, 'windows-x86_64'), 'zip', 'windows', 'x64', false, false)
}

tasks.register('buildOssNoJdkWindowsZip', Zip) {
  configure(commonZipConfig)
  archiveClassifier = 'no-jdk-windows-x86_64'
  with archiveFiles(modulesFiles(true, 'windows-x86_64'), 'zip', 'windows', 'x64', true, false)
}

Closure commonTarConfig = {
  archiveExtension = 'tar.gz'
  compression = Compression.GZIP
  dirMode 0755
  fileMode 0644
}
tasks.register('buildDarwinTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'darwin-x86_64'
  with archiveFiles(modulesFiles(false, 'darwin-x86_64'), 'tar', 'darwin', 'x64', false, true)
}

tasks.register('buildOssDarwinTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'darwin-x86_64'
  with archiveFiles(modulesFiles(true, 'darwin-x86_64'), 'tar', 'darwin', 'x64', true, true)
}

tasks.register('buildNoJdkDarwinTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-darwin-x86_64'
  with archiveFiles(modulesFiles(false, 'darwin-x86_64'), 'tar', 'darwin', 'x64', false, false)
}

tasks.register('buildOssNoJdkDarwinTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-darwin-x86_64'
  with archiveFiles(modulesFiles(true, 'darwin-x86_64'), 'tar', 'darwin', 'x64', true, false)
}

tasks.register('buildLinuxAarch64Tar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'linux-aarch64'
  with archiveFiles(modulesFiles(false, 'linux-aarch64'), 'tar', 'linux', 'aarch64', false, true)
}

tasks.register('buildLinuxTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'linux-x86_64'
  with archiveFiles(modulesFiles(false, 'linux-x86_64'), 'tar', 'linux', 'x64', false, true)
}

tasks.register('buildOssLinuxAarch64Tar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'linux-aarch64'
  with archiveFiles(modulesFiles(true, 'linux-aarch64'), 'tar', 'linux', 'aarch64', true, true)
}

tasks.register('buildOssLinuxTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'linux-x86_64'
  with archiveFiles(modulesFiles(true, 'linux-x86_64'), 'tar', 'linux', 'x64', true, true)
}

tasks.register('buildNoJdkLinuxTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-linux-x86_64'
  with archiveFiles(modulesFiles(false, 'linux-x86_64'), 'tar', 'linux', 'x64', false, false)
}

tasks.register('buildOssNoJdkLinuxTar', SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-linux-x86_64'
  with archiveFiles(modulesFiles(true, 'linux-x86_64'), 'tar', 'linux', 'x64', true, false)
}

Closure tarExists = { it -> new File('/bin/tar').exists() || new File('/usr/bin/tar').exists() || new File('/usr/local/bin/tar').exists() }
Closure unzipExists = { it -> new File('/bin/unzip').exists() || new File('/usr/bin/unzip').exists() || new File('/usr/local/bin/unzip').exists() }
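// These closures back the onlyIf conditions on the extraction sanity checks below,
// so hosts without a system tar/unzip (e.g. a bare Windows box) skip those checks
// rather than failing the build.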
// This configures the default artifact for the distribution specific
// subprojects. We have subprojects for two reasons:
// 1. Gradle project substitutions can only bind to the default
//    configuration of a project
// 2. The integ-test-zip and zip distributions have the exact same
//    filename, so they must be placed in different directories.
subprojects {
  apply plugin: 'distribution'

  String buildTask = "build${it.name.replaceAll(/-[a-z]/) { it.substring(1).toUpperCase() }.capitalize()}"
  ext.buildDist = parent.tasks.named(buildTask)
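  // e.g. the subproject named 'oss-linux-tar' maps to the parent task 'buildOssLinuxTar',
  // the inverse of the name-to-directory mapping used for destinationDirectory above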
  artifacts {
    'default' buildDist
  }

  // sanity checks if archives can be extracted
  File archiveExtractionDir
  if (project.name.contains('tar')) {
    archiveExtractionDir = new File(buildDir, 'tar-extracted')
  } else {
    assert project.name.contains('zip')
    archiveExtractionDir = new File(buildDir, 'zip-extracted')
  }
  def checkExtraction = tasks.register('checkExtraction', LoggedExec) {
    dependsOn buildDist
    doFirst {
      project.delete(archiveExtractionDir)
      archiveExtractionDir.mkdirs()
    }
    // common sanity checks on extracted archive directly as part of checkExtraction
    doLast {
      // check no plain class files are packaged
      archiveExtractionDir.eachFileRecurse(FileType.FILES) { file ->
        assert file.name.endsWith(".class") == false
      }
    }
  }
  tasks.named('check').configure { dependsOn checkExtraction }
  if (project.name.contains('tar')) {
    checkExtraction.configure {
      onlyIf tarExists
      commandLine 'tar', '-xvzf', "${-> buildDist.get().outputs.files.singleFile}", '-C', archiveExtractionDir
    }
  } else {
    assert project.name.contains('zip')
    checkExtraction.configure {
      onlyIf unzipExists
      commandLine 'unzip', "${-> buildDist.get().outputs.files.singleFile}", '-d', archiveExtractionDir
    }
  }
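  // Note: "${-> ...}" is a lazily evaluated GString, so the archive path is resolved
  // at execution time (after buildDist has produced its output) rather than at
  // configuration time.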
  Closure toolExists
  if (project.name.contains('tar')) {
    toolExists = tarExists
  } else {
    assert project.name.contains('zip')
    toolExists = unzipExists
  }

  tasks.register('checkLicense') {
    dependsOn buildDist, checkExtraction
    onlyIf toolExists
    doLast {
      String licenseFilename = null
      if (project.name.contains('oss-') || project.name == 'integ-test-zip') {
        licenseFilename = "APACHE-LICENSE-2.0.txt"
      } else {
        licenseFilename = "ELASTIC-LICENSE.txt"
      }
      final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
      final Path licensePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/LICENSE.txt")
      assertLinesInFile(licensePath, licenseLines)
    }
  }
  tasks.named('check').configure { dependsOn checkLicense }

  tasks.register('checkNotice') {
    dependsOn buildDist, checkExtraction
    onlyIf toolExists
    doLast {
      final List<String> noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch")
      final Path noticePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/NOTICE.txt")
      assertLinesInFile(noticePath, noticeLines)
    }
  }
  tasks.named('check').configure { dependsOn checkNotice }

  if (project.name == 'zip' || project.name == 'tar') {
    project.ext.licenseName = 'Elastic License'
    project.ext.licenseUrl = ext.elasticLicenseUrl
    def checkMlCppNotice = tasks.register('checkMlCppNotice') {
      dependsOn buildDist, checkExtraction
      onlyIf toolExists
      doLast {
        // this is just a small sample from the C++ notices, the idea being that if we've added these lines we've probably added all the required lines
        final List<String> expectedLines = Arrays.asList("Apache log4cxx", "Boost Software License - Version 1.0 - August 17th, 2003")
        final Path noticePath = archiveExtractionDir.toPath().resolve("elasticsearch-${VersionProperties.elasticsearch}/modules/x-pack-ml/NOTICE.txt")
        final List<String> actualLines = Files.readAllLines(noticePath)
        for (final String expectedLine : expectedLines) {
          if (actualLines.contains(expectedLine) == false) {
            throw new GradleException("expected [${noticePath}] to contain [${expectedLine}] but it did not")
          }
        }
      }
    }
    tasks.named('check').configure { dependsOn checkMlCppNotice }
  }
}
subprojects {
  group = "org.elasticsearch.distribution.${name.startsWith("oss-") ? "oss" : "default"}"
}

/*****************************************************************************
 *                            Rest test config                               *
 *****************************************************************************/
configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
  apply plugin: 'elasticsearch.standalone-rest-test'
  apply plugin: 'elasticsearch.rest-test'

  group = "org.elasticsearch.distribution.integ-test-zip"

  integTest {
    dependsOn assemble
    runner {
      if (Os.isFamily(Os.FAMILY_WINDOWS) && System.getProperty('tests.timeoutSuite') == null) {
        // override the suite timeout to 30 mins for windows, because it has the most inefficient filesystem known to man
        systemProperty 'tests.timeoutSuite', '1800000!'
      }
    }
  }

  processTestResources {
    inputs.properties(project(':distribution').restTestExpansions)
    MavenFilteringHack.filter(it, project(':distribution').restTestExpansions)
  }

  // The integ-test-distribution is published to maven
  apply plugin: 'elasticsearch.publish'

  // make the pom file name use elasticsearch instead of the project name
  archivesBaseName = "elasticsearch${it.name.contains('oss') ? '-oss' : ''}"
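  // The 'nebula' publication below attaches the distribution archive lazily:
  // buildDist is a TaskProvider, so flatMap { it.archiveFile } only resolves the
  // zip location when the publication is actually built.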
  publishing {
    publications {
      nebula {
        pom.packaging = 'zip'
        artifact(buildDist.flatMap { it.archiveFile })
      }
    }
  }
}