/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.filters.FixCrLfFilter
import org.elasticsearch.gradle.ConcatFilesTask
import org.elasticsearch.gradle.DependenciesInfoTask
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.VersionProperties

import java.nio.file.Files
import java.nio.file.Path

plugins {
  id 'base'
}

/*****************************************************************************
 *                     Third party dependencies report                       *
 *****************************************************************************/

// Concatenates the dependencies CSV files into a single file
tasks.register("generateDependenciesReport", ConcatFilesTask) {
  dependsOn rootProject.allprojects.collect { it.tasks.withType(DependenciesInfoTask) }
  files = fileTree(dir: project.rootDir, include: '**/dependencies.csv')
  headerLine = "name,version,url,license,sourceURL"
  target = new File(System.getProperty('csv') ?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
}

/*****************************************************************************
 *                                Notice file                                *
 *****************************************************************************/

// integ test zip only uses server, so a different notice file is needed there
task buildServerNotice(type: NoticeTask) {
  licensesDir new File(project(':server').projectDir, 'licenses')
}

// other distributions include notices from modules as well, which are added below later
task buildDefaultNotice(type: NoticeTask) {
  licensesDir new File(project(':server').projectDir, 'licenses')
  licensesDir new File(project(':distribution').projectDir, 'licenses')
}

task buildOssNotice(type: NoticeTask) {
  licensesDir new File(project(':server').projectDir, 'licenses')
  licensesDir new File(project(':distribution').projectDir, 'licenses')
}

task buildDefaultNoJdkNotice(type: NoticeTask) {
  licensesDir new File(project(':server').projectDir, 'licenses')
}

task buildOssNoJdkNotice(type: NoticeTask) {
  licensesDir new File(project(':server').projectDir, 'licenses')
}

/*****************************************************************************
 *                                  Modules                                  *
 *****************************************************************************/
String ossOutputs = 'build/outputs/oss'
String defaultOutputs = 'build/outputs/default'
String systemdOutputs = 'build/outputs/systemd'
String transportOutputs = 'build/outputs/transport-only'

tasks.register("processOssOutputs", Sync) {
  into ossOutputs
}

tasks.register("processDefaultOutputs", Sync) {
  into defaultOutputs
  from processOssOutputs
}

tasks.register("processSystemdOutputs", Sync) {
  into systemdOutputs
}

// Integ tests work over the rest http layer, so we need a transport included with the integ test zip.
// All transport modules are included so that they may be randomized for testing
tasks.register("processTransportOutputs", Sync) {
  into transportOutputs
}

// these are dummy tasks that can be used to depend on the relevant sub output dir
tasks.register("buildOssModules") {
  dependsOn "processOssOutputs"
  outputs.dir "${ossOutputs}/modules"
}
tasks.register("buildOssBin") {
  dependsOn "processOssOutputs"
  outputs.dir "${ossOutputs}/bin"
}
tasks.register("buildOssConfig") {
  dependsOn "processOssOutputs"
  outputs.dir "${ossOutputs}/config"
}
tasks.register("buildDefaultModules") {
  dependsOn "processDefaultOutputs"
  outputs.dir "${defaultOutputs}/modules"
}
tasks.register("buildDefaultBin") {
  dependsOn "processDefaultOutputs"
  outputs.dir "${defaultOutputs}/bin"
}
tasks.register("buildDefaultConfig") {
  dependsOn "processDefaultOutputs"
  outputs.dir "${defaultOutputs}/config"
}
tasks.register("buildSystemdModule") {
  dependsOn "processSystemdOutputs"
  outputs.dir "${systemdOutputs}/modules"
}
tasks.register("buildTransportModules") {
  dependsOn "processTransportOutputs"
  outputs.dir "${transportOutputs}/modules"
}

void copyModule(Sync copyTask, Project module) {
  copyTask.configure {
    dependsOn "${module.path}:bundlePlugin"
    from({ zipTree(module.bundlePlugin.outputs.files.singleFile) }) {
      includeEmptyDirs false

      // these are handled separately in the log4j config tasks below
      exclude '*/config/log4j2.properties'
      exclude 'config/log4j2.properties'

      eachFile { details ->
        String name = module.plugins.hasPlugin('elasticsearch.esplugin') ? module.esplugin.name : module.es_meta_plugin.name
        // Copy all non config/bin files
        // Note these might be under a subdirectory in the case of a meta plugin
        if ((details.relativePath.pathString ==~ /([^\/]+\/)?(config|bin)\/.*/) == false) {
          details.relativePath = details.relativePath.prepend('modules', name)
        } else if ((details.relativePath.pathString ==~ /([^\/]+\/)(config|bin)\/.*/)) {
          // this is the meta plugin case, in which we need to remove the intermediate dir
          String[] segments = details.relativePath.segments
          details.relativePath = new RelativePath(true, segments.takeRight(segments.length - 1))
        }
      }
    }
  }
}

// log4j config could be contained in modules, so we must join it together using these tasks
tasks.register("buildOssLog4jConfig") {
  dependsOn "processOssOutputs"
  ext.contents = []
  ext.log4jFile = file("${ossOutputs}/log4j2.properties")
  outputs.file log4jFile
}
tasks.register("buildDefaultLog4jConfig") {
  dependsOn "processDefaultOutputs"
  ext.contents = []
  ext.log4jFile = file("${defaultOutputs}/log4j2.properties")
  outputs.file log4jFile
}

Closure writeLog4jProperties = {
  String mainLog4jProperties = file('src/config/log4j2.properties').getText('UTF-8')
  it.log4jFile.setText(mainLog4jProperties, 'UTF-8')
  for (String moduleLog4jProperties : it.contents.reverse()) {
    it.log4jFile.append(moduleLog4jProperties, 'UTF-8')
  }
}
tasks.named("buildOssLog4jConfig").configure { doLast(writeLog4jProperties) }
tasks.named("buildDefaultLog4jConfig").configure { doLast(writeLog4jProperties) }

// copy log4j2.properties from modules that have it
void copyLog4jProperties(Task buildTask, Project module) {
  buildTask.dependsOn "${module.path}:bundlePlugin"
  buildTask.doFirst {
    FileTree tree = zipTree(module.bundlePlugin.outputs.files.singleFile)
    FileTree filtered = tree.matching {
      include 'config/log4j2.properties'
      include '*/config/log4j2.properties' // could be in a bundled plugin
    }
    if (filtered.isEmpty() == false) {
      buildTask.contents.add('\n\n' + filtered.singleFile.getText('UTF-8'))
    }
  }
}
ext.restTestExpansions = [
  'expected.modules.count': 0,
]

// we create the buildOssModules task above but fill it here so we can do a single
// loop over modules to also setup cross task dependencies and increment our modules counter
project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each { Project module ->
  if (module.name == 'systemd') {
    // the systemd module is only included in the package distributions
    return
  }
  File licenses = new File(module.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNotice.licensesDir licenses
    buildOssNotice.licensesDir licenses
  }

  copyModule(processOssOutputs, module)
  if (module.name.startsWith('transport-')) {
    copyModule(processTransportOutputs, module)
  }

  copyLog4jProperties(buildOssLog4jConfig, module)
  copyLog4jProperties(buildDefaultLog4jConfig, module)

  // make sure the module's integration tests run after the integ-test-zip (ie rest tests)
  module.afterEvaluate({
    module.integTest.mustRunAfter(':distribution:archives:integ-test-zip:integTest')
  })

  restTestExpansions['expected.modules.count'] += 1
}

// use licenses from each of the bundled xpack plugins
Project xpack = project(':x-pack:plugin')
xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
  File licenses = new File(xpackModule.projectDir, 'licenses')
  if (licenses.exists()) {
    buildDefaultNotice.licensesDir licenses
  }
  copyModule(processDefaultOutputs, xpackModule)
  copyLog4jProperties(buildDefaultLog4jConfig, xpackModule)
}

copyModule(processSystemdOutputs, project(':modules:systemd'))

configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {

  apply plugin: 'elasticsearch.jdk-download'

  // Setup all required JDKs
  project.jdks {
    ['darwin', 'windows', 'linux'].each { platform ->
      (platform == 'linux' ? ['x64', 'aarch64'] : ['x64']).each { architecture ->
        "bundled_${platform}_${architecture}" {
          it.platform = platform
          it.version = VersionProperties.getBundledJdk(platform)
          it.vendor = VersionProperties.bundledJdkVendor
          it.architecture = architecture
        }
      }
    }
  }

  // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run...
  /*****************************************************************************
   *              Properties to expand when copying packaging files            *
   *****************************************************************************/
  project.ext {

    /*****************************************************************************
     *                     Common files in all distributions                     *
     *****************************************************************************/
    libFiles = { oss ->
      copySpec {
        // delay by using closures, since they have not yet been configured, so no jar task exists yet
        from { project(':server').jar }
        from { project(':server').configurations.runtime }
        from { project(':libs:elasticsearch-plugin-classloader').jar }
        from { project(':distribution:tools:java-version-checker').jar }
        from { project(':distribution:tools:launchers').jar }
        into('tools/plugin-cli') {
          from { project(':distribution:tools:plugin-cli').jar }
          from { project(':distribution:tools:plugin-cli').configurations.runtime }
        }
        into('tools/keystore-cli') {
          from { project(':distribution:tools:keystore-cli').jar }
        }
        if (oss == false) {
          into('tools/security-cli') {
            from { project(':x-pack:plugin:security:cli').jar }
            from { project(':x-pack:plugin:security:cli').configurations.compile }
          }
        }
      }
    }

    modulesFiles = { oss, platform ->
      copySpec {
        eachFile {
          if (it.relativePath.segments[-2] == 'bin' || (platform == 'darwin-x86_64' && it.relativePath.segments[-2] == 'MacOS')) {
            // bin files, wherever they are within modules (eg platform specific) should be executable
            // and MacOS is an alternative to bin on macOS
            it.mode = 0755
          } else {
            it.mode = 0644
          }
        }
        Task buildModules
        if (oss) {
          buildModules = project(':distribution').buildOssModules
        } else {
          buildModules = project(':distribution').buildDefaultModules
        }
        List excludePlatforms = ['linux-x86_64', 'linux-aarch64', 'windows-x86_64', 'darwin-x86_64']
        if (platform != null) {
          excludePlatforms.remove(excludePlatforms.indexOf(platform))
        } else {
          excludePlatforms = []
        }
        from(buildModules) {
          // geo registers the geo_shape mapper that is overridden by
          // the geo_shape mapper registered in the x-pack-spatial plugin
          if (oss == false) {
            exclude "**/geo/**"
          }
          for (String excludePlatform : excludePlatforms) {
            exclude "**/platform/${excludePlatform}/**"
          }
        }
        if (project.path.startsWith(':distribution:packages')) {
          from(project(':distribution').buildSystemdModule)
        }
      }
    }

    transportModulesFiles = copySpec {
      from project(':distribution').buildTransportModules
    }

    configFiles = { distributionType, oss, jdk ->
      copySpec {
        with copySpec {
          // main config files, processed with distribution specific substitutions
          from '../src/config'
          exclude 'log4j2.properties' // this is handled separately below
          MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk))
        }
        if (oss) {
          from project(':distribution').buildOssLog4jConfig
          from project(':distribution').buildOssConfig
        } else {
          from project(':distribution').buildDefaultLog4jConfig
          from project(':distribution').buildDefaultConfig
        }
      }
    }

    binFiles = { distributionType, oss, jdk ->
      copySpec {
        // non-windows files, for all distributions
        with copySpec {
          from '../src/bin'
          exclude '*.exe'
          exclude '*.bat'
          eachFile { it.setMode(0755) }
          MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk))
        }
        // windows files, only for zip
        if (distributionType == 'zip') {
          with copySpec {
            from '../src/bin'
            include '*.bat'
            filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
            MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk))
          }
          with copySpec {
            from '../src/bin'
            include '*.exe'
          }
        }
        // module provided bin files
        with copySpec {
          eachFile { it.setMode(0755) }
          if (oss) {
            from project(':distribution').buildOssBin
          } else {
            from project(':distribution').buildDefaultBin
          }
          if (distributionType != 'zip') {
            exclude '*.bat'
          }
        }
      }
    }

    noticeFile = { oss, jdk ->
      copySpec {
        if (project.name == 'integ-test-zip') {
          from buildServerNotice
        } else {
          if (oss && jdk) {
            from buildOssNotice
          } else if (oss) {
            from buildOssNoJdkNotice
          } else if (jdk) {
            from buildDefaultNotice
          } else {
            from buildDefaultNoJdkNotice
          }
        }
      }
    }

    jdkFiles = { Project project, String platform, String architecture ->
      return copySpec {
        from project.jdks."bundled_${platform}_${architecture}"
        exclude "demo/**"
        /*
         * The Contents/MacOS directory interferes with notarization, and is unused by our distribution, so we exclude
         * it from the build.
         */
        if ("darwin".equals(platform)) {
          exclude "Contents/MacOS"
        }
        eachFile { FileCopyDetails details ->
          if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
            details.mode = 0755
          }
          if (details.name == 'src.zip') {
            details.exclude()
          }
        }
      }
    }
  }
}

/**
 * Build some variables that are replaced in the packages. This includes both
 * scripts like bin/elasticsearch and bin/elasticsearch-plugin that a user might run and also
 * scripts like postinst which are run as part of the installation.
 *
 * package.name
 *   The name of the project. It's sprinkled throughout the scripts.
 * package.version
 *   The version of the project. It's mostly used to find the exact jar name.
 * path.conf
 *   The default directory from which to load configuration. This is used in
 *   the packaging scripts, but in that context it is always
 *   /etc/elasticsearch. It's also used in bin/elasticsearch-plugin, where it is
 *   /etc/elasticsearch for the OS packages but $ESHOME/config otherwise.
 * path.env
 *   The env file sourced before bin/elasticsearch to set environment
 *   variables. Think /etc/default/elasticsearch.
 * heap.min and heap.max
 *   Default min and max heap
 * scripts.footer
 *   Footer appended to control scripts embedded in the distribution that is
 *   (almost) entirely there for cosmetic reasons.
 * stopping.timeout
 *   RPM's init script needs to wait for elasticsearch to stop before
 *   returning from stop and it needs a maximum time to wait. This is it. One
 *   day. DEB retries forever.
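 *
 * For illustration only (the template line below is hypothetical; the real templates
 * live under src/bin and src/config), MavenFilteringHack substitutes Maven-style ${...}
 * tokens with the distribution-specific values from the expansions map defined below.
 * A line such as
 *
 *   ${source.path.env}
 *
 * would become "source /etc/default/elasticsearch" in the deb package,
 * "source /etc/sysconfig/elasticsearch" in the rpm package, and a shell fallback that
 * derives ES_PATH_CONF from ES_HOME in the tar/zip distributions.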
 */
subprojects {
  ext.expansionsForDistribution = { distributionType, oss, jdk ->
    final String defaultHeapSize = "1g"
    final String packagingPathData = "path.data: /var/lib/elasticsearch"
    final String pathLogs = "/var/log/elasticsearch"
    final String packagingPathLogs = "path.logs: ${pathLogs}"
    final String packagingLoggc = "${pathLogs}/gc.log"

    String licenseText
    if (oss) {
      licenseText = rootProject.file('licenses/APACHE-LICENSE-2.0.txt').getText('UTF-8')
    } else {
      licenseText = rootProject.file('licenses/ELASTIC-LICENSE.txt').getText('UTF-8')
    }
    // license text needs to be indented with a single space
    licenseText = ' ' + licenseText.replace('\n', '\n ')

    String footer = "# Built for ${project.name}-${project.version} " +
      "(${distributionType})"
    Map expansions = [
      'project.name': project.name,
      'project.version': version,

      'path.conf': [
        'deb': '/etc/elasticsearch',
        'rpm': '/etc/elasticsearch',
        'def': '"$ES_HOME"/config'
      ],
      'path.data': [
        'deb': packagingPathData,
        'rpm': packagingPathData,
        'def': '#path.data: /path/to/data'
      ],
      'path.env': [
        'deb': '/etc/default/elasticsearch',
        'rpm': '/etc/sysconfig/elasticsearch',
        /* There isn't one of these files for tar or zip but it's important to
           make an empty string here so the script can properly skip it. */
        'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
      ],
      'source.path.env': [
        'deb': 'source /etc/default/elasticsearch',
        'rpm': 'source /etc/sysconfig/elasticsearch',
        'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
      ],
      'path.logs': [
        'deb': packagingPathLogs,
        'rpm': packagingPathLogs,
        'def': '#path.logs: /path/to/logs'
      ],
      'loggc': [
        'deb': packagingLoggc,
        'rpm': packagingLoggc,
        'def': 'logs/gc.log'
      ],

      'heap.min': defaultHeapSize,
      'heap.max': defaultHeapSize,

      'heap.dump.path': [
        'deb': "-XX:HeapDumpPath=/var/lib/elasticsearch",
        'rpm': "-XX:HeapDumpPath=/var/lib/elasticsearch",
        'def': "-XX:HeapDumpPath=data"
      ],

      'error.file': [
        'deb': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
        'rpm': "-XX:ErrorFile=/var/log/elasticsearch/hs_err_pid%p.log",
        'def': "-XX:ErrorFile=logs/hs_err_pid%p.log"
      ],

      'stopping.timeout': [
        'rpm': 86400,
      ],

      'scripts.footer': [
        /* Debian needs exit 0 on these scripts so we add it here and preserve
           the pretty footer. */
        'deb': "exit 0\n${footer}",
        'def': footer
      ],

      'es.distribution.flavor': [
        'def': oss ? 'oss' : 'default'
      ],

      'es.distribution.type': [
        'deb': 'deb',
        'rpm': 'rpm',
        'tar': 'tar',
        'zip': 'zip'
      ],

      'es.bundled_jdk': [
        'def': jdk ? 'true' : 'false'
      ],

      'license.name': [
        'deb': oss ?
          'ASL-2.0' : 'Elastic-License'
      ],

      'license.text': [
        'deb': licenseText,
      ],
    ]
    Map result = [:]
    expansions = expansions.each { key, value ->
      if (value instanceof Map) {
        // 'def' is for default but it's three characters like 'rpm' and 'deb'
        value = value[distributionType] ?: value['def']
        if (value == null) {
          return
        }
      }
      result[key] = value
    }
    return result
  }

  ext.assertLinesInFile = { Path path, List expectedLines ->
    final List actualLines = Files.readAllLines(path)
    int line = 0
    for (final String expectedLine : expectedLines) {
      final String actualLine = actualLines.get(line)
      if (expectedLine != actualLine) {
        throw new GradleException("expected line [${line + 1}] in [${path}] to be [${expectedLine}] but was [${actualLine}]")
      }
      line++
    }
  }
}

['archives:windows-zip',
 'archives:oss-windows-zip',
 'archives:darwin-tar',
 'archives:oss-darwin-tar',
 'archives:linux-aarch64-tar',
 'archives:oss-linux-aarch64-tar',
 'archives:linux-tar',
 'archives:oss-linux-tar',
 'archives:integ-test-zip',
 'packages:rpm',
 'packages:deb',
 'packages:aarch64-rpm',
 'packages:aarch64-deb',
 'packages:oss-rpm',
 'packages:oss-deb',
 'packages:aarch64-oss-rpm',
 'packages:aarch64-oss-deb'
].forEach { subName ->
  Project subproject = project("${project.path}:${subName}")
  Configuration configuration = configurations.create(subproject.name)
  dependencies {
    "${configuration.name}" project(subproject.path)
  }
}
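
// Illustrative sketch only, not wired into this build: a subproject under
// :distribution:archives could assemble its content from the copySpec factories defined
// in project.ext above roughly like this. The task name and layout here are hypothetical;
// the real wiring lives in the archives and packages build scripts.
//
//   task buildExampleLinuxTar(type: Tar) {
//     into("elasticsearch-${VersionProperties.elasticsearch}") {
//       into('lib') { with libFiles(false) }
//       into('config') { with configFiles('tar', false, true) }
//       into('bin') { with binFiles('tar', false, true) }
//       into('modules') { with modulesFiles(false, 'linux-x86_64') }
//       into('jdk') { with jdkFiles(project, 'linux', 'x64') }
//       with noticeFile(false, true)
//     }
//   }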