OpenSearch/distribution/build.gradle

/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.tools.ant.filters.FixCrLfFilter
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.EmptyDirTask
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.precommit.DependencyLicensesTask
import org.elasticsearch.gradle.precommit.UpdateShasTask
import org.elasticsearch.gradle.test.RunTask
// for deb/rpm
buildscript {
repositories {
maven {
url "https://plugins.gradle.org/m2/"
}
}
dependencies {
classpath 'com.netflix.nebula:gradle-ospackage-plugin:3.4.0'
}
}
// this is common configuration for the distributions, but we also capture it here so the license check below can use it
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive()
/*****************************************************************************
* Modules *
*****************************************************************************/
task buildModules(type: Sync) {
into 'build/modules'
}
ext.restTestExpansions = [
'expected.modules.count': 0,
]
// we create the buildModules task above so the distribution subprojects can
// depend on it, but we don't actually configure it until here so we can do a single
// loop over modules to also set up cross-task dependencies and increment our modules counter
project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each { Project module ->
buildModules {
dependsOn({ project(module.path).bundlePlugin })
into(module.name) {
from { zipTree(project(module.path).bundlePlugin.outputs.files.singleFile) }
}
}
// We would like to make sure integ tests for the distribution run after
// integ tests for the modules included in the distribution. However, gradle
// has a bug where depending on a task with a finalizer can sometimes not make
// the finalizer task follow the original task immediately. To work around this,
// we make the mustRunAfter target the finalizer task itself.
// See https://discuss.gradle.org/t/cross-project-task-dependencies-ordering-screws-up-finalizers/13190
project.configure(project.subprojects.findAll { it.name != 'integ-test-zip' }) { Project distribution ->
distribution.afterEvaluate({
distribution.integTest.mustRunAfter("${module.path}:integTest#stop")
})
}
// we also want to make sure the module's integration tests run after the integ-test-zip (i.e. the rest tests)
module.afterEvaluate({
module.integTest.mustRunAfter(':distribution:integ-test-zip:integTest#stop')
})
restTestExpansions['expected.modules.count'] += 1
}
// make sure we have a clean task since we aren't a java project, but we have tasks that
// put stuff in the build dir
task clean(type: Delete) {
delete 'build'
}
subprojects {
/*****************************************************************************
* Rest test config *
*****************************************************************************/
apply plugin: 'elasticsearch.rest-test'
project.integTest {
dependsOn project.assemble
includePackaged project.name == 'integ-test-zip'
cluster {
distribution = project.name
}
if (project.name != 'integ-test-zip') {
// see note above with module mustRunAfter about why integTest#stop is used here
mustRunAfter ':distribution:integ-test-zip:integTest#stop'
}
}
processTestResources {
inputs.properties(project(':distribution').restTestExpansions)
MavenFilteringHack.filter(it, project(':distribution').restTestExpansions)
}
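// Illustration (a sketch, not part of the build): MavenFilteringHack performs Maven-style
// ${key} substitution on the copied resources. For example, a hypothetical rest test
// resource containing the line
//
//   expected_modules_count: ${expected.modules.count}
//
// would be copied with the placeholder replaced by the module count accumulated in
// restTestExpansions by the modules loop above.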
/*****************************************************************************
* Maven config *
*****************************************************************************/
// note: the group must be correct before applying the nexus plugin, or it will capture the wrong value...
project.group = "org.elasticsearch.distribution.${project.name}"
apply plugin: 'com.bmuschko.nexus'
// we must create our own install task, because the default one is only added when the java plugin is applied
task install(type: Upload, description: "Installs the 'archives' artifacts into the local Maven repository.", group: 'Upload') {
configuration = configurations.archives
MavenRepositoryHandlerConvention repositoriesHandler = (MavenRepositoryHandlerConvention)getRepositories().getConvention().getPlugin(MavenRepositoryHandlerConvention)
repositoriesHandler.mavenInstaller()
}
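// Usage sketch: with the task above in place, something like
//   gradle :distribution:tar:install
// should install that distribution's 'archives' artifacts into the local Maven
// repository, mirroring the install task the java plugin would normally provide.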
// TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run...
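// One possible shape for that fix (a sketch, not wired in): declare the expansions map
// as task inputs on the copy tasks that consume it, so that changing a value re-runs them, e.g.
//   tasks.withType(Copy) { inputs.properties(expansions) }
// (processTestResources above already does this for restTestExpansions).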
/*****************************************************************************
* Properties to expand when copying packaging files *
*****************************************************************************/
project.ext {
expansions = expansionsForDistribution(project.name)
/*****************************************************************************
* Common files in all distributions *
*****************************************************************************/
libFiles = copySpec {
into 'lib'
from project(':core').jar
from project(':distribution').dependencyFiles
}
modulesFiles = copySpec {
into 'modules'
from project(':distribution').buildModules
}
configFiles = copySpec {
from '../src/main/resources/config'
MavenFilteringHack.filter(it, expansions)
}
commonFiles = copySpec {
// everything except Windows files; config is handled separately
from '../src/main/resources'
exclude 'bin/*.bat'
exclude 'bin/*.exe'
exclude 'config/**'
filesMatching('bin/*') { it.setMode(0755) }
MavenFilteringHack.filter(it, expansions)
}
}
}
/*****************************************************************************
* Zip and tgz configuration *
*****************************************************************************/
configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.name) }) {
project.ext.archivesFiles = copySpec {
into("elasticsearch-${version}") {
with libFiles
into('config') {
with configFiles
}
with copySpec {
with commonFiles
from('../src/main/resources') {
include 'bin/*.bat'
filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
}
MavenFilteringHack.filter(it, expansions)
}
from('../src/main/resources') {
include 'bin/*.exe'
}
if (project.name != 'integ-test-zip') {
with modulesFiles
}
}
}
}
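// For orientation (a sketch; exact contents vary by distribution), the copySpec above
// produces an archive laid out roughly as:
//
//   elasticsearch-<version>/
//     lib/      <- core jar plus runtime dependencies (libFiles)
//     config/   <- filtered config files (configFiles)
//     bin/      <- scripts from commonFiles, *.bat converted to CRLF, *.exe copied unfiltered
//     modules/  <- bundled modules (omitted for integ-test-zip)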
/*****************************************************************************
* Deb and rpm configuration *
*****************************************************************************
*
* The general strategy here is to build a directory on disk, packagingFiles
* that contains stuff that needs to be copied into the distributions. This is
* important for three reasons:
* 1. ospackage wants to copy the directory permissions that it sees off of the
* filesystem. If you ask it to create a directory that doesn't already
* exist on disk it petulantly creates it with 0755 permissions, no matter
* how hard you try to convince it otherwise.
* 2. Convincing ospackage to pick up an empty directory as part of a set of
* directories on disk is reasonably easy. Convincing it to just create an
* empty directory requires more wits than I have.
* 3. ospackage really wants to suck up some of the debian control scripts
* directly from the filesystem. It doesn't want to process them through
* MavenFilteringHack or any other copy-style action.
*/
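/*
 * Roughly, the tasks below lay out build/packaging like this (a sketch, for
 * orientation only):
 *
 *   packaging/
 *     scripts/ env/ systemd/             <- filtered copies of src/main/packaging
 *     etc/elasticsearch/                 <- filled config plus an empty scripts/ dir, mode 0750
 *     var/{run,log,lib}/elasticsearch/   <- empty dirs so ospackage records the right ownership
 *     usr/share/elasticsearch/plugins/   <- empty plugins dir
 */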
configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
File packagingFiles = new File(buildDir, 'packaging')
project.ext.packagingFiles = packagingFiles
task processPackagingFiles(type: Copy) {
from '../src/main/packaging'
from 'src/main/packaging'
MavenFilteringHack.filter(it, expansions)
into packagingFiles
/* Explicitly declare the outputs so that gradle won't skip this task if
one of the other tasks like createEtc runs first and creates the packaging
directory as a side effect. */
outputs.dir("${packagingFiles}/scripts")
outputs.dir("${packagingFiles}/env")
outputs.dir("${packagingFiles}/systemd")
}
task createEtc(type: EmptyDirTask) {
dir "${packagingFiles}/etc/elasticsearch"
dirMode 0750
}
task createEtcScripts(type: EmptyDirTask) {
dependsOn createEtc
dir "${packagingFiles}/etc/elasticsearch/scripts"
dirMode 0750
}
task fillEtc(type: Copy) {
dependsOn createEtc, createEtcScripts
with configFiles
into "${packagingFiles}/etc/elasticsearch"
/* Explicitly declare the output files so this task doesn't consider itself
up to date when the directory is created, which it would by default. And
that'll happen when createEtc runs. */
outputs.file "${packagingFiles}/etc/elasticsearch/elasticsearch.yml"
outputs.file "${packagingFiles}/etc/elasticsearch/jvm.options"
outputs.file "${packagingFiles}/etc/elasticsearch/logging.yml"
}
task createPidDir(type: EmptyDirTask) {
dir "${packagingFiles}/var/run/elasticsearch"
}
task createLogDir(type: EmptyDirTask) {
dir "${packagingFiles}/var/log/elasticsearch"
}
task createDataDir(type: EmptyDirTask) {
dir "${packagingFiles}/var/lib/elasticsearch"
}
task createPluginsDir(type: EmptyDirTask) {
dir "${packagingFiles}/usr/share/elasticsearch/plugins"
}
/**
* Set up the build/packaging directory to be like the target filesystem
* because ospackage will use arbitrary permissions if you try to create a
* directory that doesn't exist on the filesystem.
*/
task preparePackagingFiles {
dependsOn processPackagingFiles, fillEtc, createPidDir, createLogDir,
createDataDir, createPluginsDir
}
apply plugin: 'nebula.ospackage-base'
ospackage {
packageName 'elasticsearch'
maintainer 'Elasticsearch Team <info@elastic.co>'
summary '''
Elasticsearch is a distributed RESTful search engine built for the cloud.
Reference documentation can be found at
https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html
and the 'Elasticsearch: The Definitive Guide' book can be found at
https://www.elastic.co/guide/en/elasticsearch/guide/current/index.html
'''.stripIndent().replace('\n', ' ').trim()
url 'https://www.elastic.co/'
/* The version of the package can't contain -SNAPSHOT so we rip it off if
we see it. We'll add it back on to the file name though. */
version project.version.replace('-SNAPSHOT', '')
String scripts = "${packagingFiles}/scripts"
preInstall file("${scripts}/preinst")
postInstall file("${scripts}/postinst")
preUninstall file("${scripts}/prerm")
postUninstall file("${scripts}/postrm")
into '/usr/share/elasticsearch'
user 'root'
permissionGroup 'root'
with libFiles
with modulesFiles
with copySpec {
with commonFiles
if (project.name == 'deb') {
// Deb gets a copyright file instead.
exclude 'LICENSE.txt'
}
}
configurationFile '/etc/elasticsearch/elasticsearch.yml'
configurationFile '/etc/elasticsearch/jvm.options'
configurationFile '/etc/elasticsearch/logging.yml'
into('/etc') {
from "${packagingFiles}/etc"
fileMode 0750
permissionGroup 'elasticsearch'
includeEmptyDirs true
createDirectoryEntry true
}
into('/usr/lib/tmpfiles.d') {
from "${packagingFiles}/systemd/elasticsearch.conf"
}
configurationFile '/usr/lib/systemd/system/elasticsearch.service'
into('/usr/lib/systemd/system') {
from "${packagingFiles}/systemd/elasticsearch.service"
}
into('/usr/lib/sysctl.d') {
from "${packagingFiles}/systemd/sysctl/elasticsearch.conf"
}
configurationFile '/etc/init.d/elasticsearch'
into('/etc/init.d') {
from "${packagingFiles}/init.d/elasticsearch"
fileMode 0755
}
configurationFile project.expansions['path.env']
into(new File(project.expansions['path.env']).getParent()) {
from "${project.packagingFiles}/env/elasticsearch"
}
/**
* Suck up all the empty directories that we need to install into the path.
*/
Closure suckUpEmptyDirectories = { path, u, g ->
into(path) {
from "${packagingFiles}/${path}"
includeEmptyDirs true
createDirectoryEntry true
user u
permissionGroup g
}
}
suckUpEmptyDirectories('/var/run', 'elasticsearch', 'elasticsearch')
suckUpEmptyDirectories('/var/log', 'elasticsearch', 'elasticsearch')
suckUpEmptyDirectories('/var/lib', 'elasticsearch', 'elasticsearch')
suckUpEmptyDirectories('/usr/share/elasticsearch', 'root', 'root')
}
}
// TODO: dependency checks should really happen when building the jar itself, which would remove the need
// for this hackery; then we could do this inside the BuildPlugin
task dependencyLicenses(type: DependencyLicensesTask) {
dependsOn = [dependencyFiles]
dependencies = dependencyFiles
mapping from: /lucene-.*/, to: 'lucene'
mapping from: /jackson-.*/, to: 'jackson'
}
task check(group: 'Verification', description: 'Runs all checks.', dependsOn: dependencyLicenses) {} // dummy task!
task updateShas(type: UpdateShasTask) {
parentTask = dependencyLicenses
}
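// Usage sketch: if dependencyLicenses flags a missing or stale checksum, running
//   gradle :distribution:updateShas
// should regenerate the sha files it expects (assuming the usual UpdateShasTask
// behaviour from the build plugins).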
task run(type: RunTask) {}
/**
* Build some variables that are replaced in the packages. These expansions are
* applied both to scripts that a user might run, like bin/elasticsearch and
* bin/elasticsearch-plugin, and to scripts that run as part of the installation, like postinst.
*
* <dl>
* <dt>package.name</dt>
* <dd>The name of the project. It's sprinkled throughout the scripts.</dd>
* <dt>package.version</dt>
* <dd>The version of the project. It's mostly used to find the exact jar name.
* </dd>
* <dt>path.conf</dt>
* <dd>The default directory from which to load configuration. This is used in
* the packaging scripts, but in that context it is always
* /etc/elasticsearch. It's also used in bin/elasticsearch-plugin, where it is
* /etc/elasticsearch for the os packages but $ES_HOME/config otherwise.</dd>
* <dt>path.env</dt>
* <dd>The env file sourced before bin/elasticsearch to set environment
* variables. Think /etc/defaults/elasticsearch.</dd>
* <dt>heap.min and heap.max</dt>
* <dd>Default min and max heap</dd>
* <dt>scripts.footer</dt>
* <dd>Footer appended to control scripts embedded in the distribution that is
* (almost) entirely there for cosmetic reasons.</dd>
* <dt>stopping.timeout</dt>
* <dd>The maximum time RPM's init script will wait for elasticsearch to stop
* before returning from the stop action: one day. DEB retries forever.</dd>
* </dl>
*/
Map<String, String> expansionsForDistribution(distributionType) {
String footer = "# Built for ${project.name}-${project.version} " +
"(${distributionType})"
Map<String, Object> expansions = [
'project.name': project.name,
'project.version': version,
'path.conf': [
'tar': '$ES_HOME/config',
'zip': '$ES_HOME/config',
'def': '/etc/elasticsearch',
],
'path.env': [
'deb': '/etc/default/elasticsearch',
'rpm': '/etc/sysconfig/elasticsearch',
/* There isn't one of these files for tar or zip but it's important to
make an empty string here so the script can properly skip it. */
'def': '',
],
'heap.min': '256m',
'heap.max': '1g',
'stopping.timeout': [
'rpm': 86400,
],
'scripts.footer': [
/* Debian needs exit 0 on these scripts so we add it here and preserve
the pretty footer. */
'deb': "exit 0\n${footer}",
'def': footer
],
]
Map<String, String> result = [:]
expansions.each { key, value ->
if (value instanceof Map) {
// 'def' is for default, but it's three characters like 'rpm' and 'deb'
value = value[distributionType] ?: value['def']
if (value == null) {
return
}
}
result[key] = value
}
return result
}
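/*
 * Worked example (illustrative only): for the 'deb' distribution, assuming
 * project.name is 'distribution' and version is '5.0.0-SNAPSHOT', the map above
 * resolves to roughly:
 *
 *   'project.name'    : 'distribution'
 *   'project.version' : '5.0.0-SNAPSHOT'
 *   'path.conf'       : '/etc/elasticsearch'          (falls back to 'def')
 *   'path.env'        : '/etc/default/elasticsearch'
 *   'heap.min'        : '256m'
 *   'heap.max'        : '1g'
 *   'scripts.footer'  : 'exit 0\n# Built for distribution-5.0.0-SNAPSHOT (deb)'
 *
 * 'stopping.timeout' is dropped for deb because only 'rpm' defines it and there
 * is no 'def' fallback.
 */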