Mirror of https://github.com/honeymoose/OpenSearch.git, synced 2025-02-21 04:15:02 +00:00
Smarter copying of the rest specs and tests (#52114)

This PR addresses the unnecessary copying of the rest specs and allows for better semantics for which specs and tests are copied. By default the rest specs will get copied if the project applies `elasticsearch.standalone-rest-test` or `esplugin` and the project has rest tests, or if you configure the custom extension `restResources`.

This PR also removes the need for dozens of places where the x-pack specs were copied, by supporting copying of the x-pack rest specs too. The plugin/task introduced here can also copy the rest tests to the local project through a similar configuration.

The new plugin/task allows a user to minimize the surface area of which rest specs are copied. Each project can be configured to include only a subset of the specs (or tests). Configuring a project to only copy the specs when actually needed should help with build cache hit rates, since we can better define what is actually in use. However, project-level optimizations for build cache hit rates are not included in this PR.

Also, with this PR you can no longer use the includePackaged flag on the integTest task.

The following items are included in this PR:

* new plugin: `elasticsearch.rest-resources`
* new tasks: CopyRestApiTask and CopyRestTestsTask, which perform the copy
* new extension `restResources`

```
restResources {
  restApi {
    includeCore 'foo', 'bar'  // will include the core specs that start with foo and bar
    includeXpack 'baz'        // will include x-pack specs that start with baz
  }
  restTests {
    includeCore 'foo', 'bar'  // will include the core tests that start with foo and bar
    includeXpack 'baz'        // will include the x-pack tests that start with baz
  }
}
```
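As a rough illustration of the workflow described above (not taken from the PR itself), a consuming project's build.gradle could apply the new plugin and narrow the copied resources; the spec and test prefixes below are placeholders:

```
apply plugin: 'elasticsearch.rest-resources'

restResources {
  restApi {
    includeCore 'index', 'search'   // hypothetical prefixes: copy only the matching core specs
  }
  restTests {
    includeXpack 'security'         // hypothetical prefix: copy only the matching x-pack tests
  }
}
```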
231 lines
7.2 KiB
Groovy
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.distribution-download'

testFixtures.useFixture()

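// Configurations supplying the artifacts copied into the Docker build context: the default and
// OSS linux tarballs come from the archive projects; dockerPlugins holds any additional plugin
// artifacts that should be included in the context.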
configurations {
  dockerPlugins
  dockerSource
  ossDockerSource
}

dependencies {
  dockerSource project(path: ":distribution:archives:linux-tar")
  ossDockerSource project(path: ":distribution:archives:oss-linux-tar")
}

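// Token substitutions applied to src/docker/Dockerfile. `local` controls whether the tarball is
// COPY'd from the build context or downloaded from artifacts.elastic.co at image build time.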
ext.expansions = { oss, local ->
  final String classifier = 'linux-x86_64'
  final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
  return [
    'build_date'          : BuildParams.buildDate,
    'elasticsearch'       : elasticsearch,
    'git_revision'        : BuildParams.gitRevision,
    'license'             : oss ? 'Apache-2.0' : 'Elastic-License',
    'source_elasticsearch': local ? "COPY $elasticsearch /opt/" : "RUN cd /opt && curl --retry 8 -s -L -O https://artifacts.elastic.co/downloads/elasticsearch/${elasticsearch} && cd -",
    'version'             : VersionProperties.elasticsearch
  ]
}

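// Helpers for the per-flavor build directory and task names (e.g. copyDockerContext vs copyOssDockerContext).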
private static String buildPath(final boolean oss) {
  return "build/${oss ? 'oss-' : ''}docker"
}

private static String taskName(final String prefix, final boolean oss, final String suffix) {
  return "${prefix}${oss ? 'Oss' : ''}${suffix}"
}

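// A CopySpec describing the Docker build context: the bin/ and config/ trees from src/docker,
// plus the Dockerfile with the expansions above applied.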
project.ext {
  dockerBuildContext = { boolean oss, boolean local ->
    copySpec {
      into('bin') {
        from project.projectDir.toPath().resolve("src/docker/bin")
      }

      into('config') {
        /*
         * Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration
         * by creating config files in oss or default build-context sub-modules.
         */
        from project.projectDir.toPath().resolve("src/docker/config")
        if (oss) {
          from project.projectDir.toPath().resolve("src/docker/config/oss")
        }
      }

      from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
        expand(expansions(oss, local))
      }
    }
  }
}

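// Registers a Sync task per flavor that assembles the build context (scripts, config, the
// distribution tarball and any plugins) under build/docker or build/oss-docker. The Dockerfile
// expansions, except the volatile build_date, are registered as task inputs so the context is
// refreshed whenever they change.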
void addCopyDockerContextTask(final boolean oss) {
  task(taskName("copy", oss, "DockerContext"), type: Sync) {
    expansions(oss, true).findAll { it.key != 'build_date' }.each { k, v ->
      inputs.property(k, { v.toString() })
    }
    into buildPath(oss)

    with dockerBuildContext(oss, true)

    if (oss) {
      from configurations.ossDockerSource
    } else {
      from configurations.dockerSource
    }

    from configurations.dockerPlugins
  }
}

def createAndSetWritable(Object... locations) {
  locations.each { location ->
    File file = file(location)
    file.mkdirs()
    file.setWritable(true, false)
  }
}

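// Stage the x-pack test keystore under build/certs and make it world-readable for the Docker test fixture.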
task copyKeystore(type: Sync) {
  from project(':x-pack:plugin:core')
    .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
  into "${buildDir}/certs"
  doLast {
    file("${buildDir}/certs").setReadable(true, false)
    file("${buildDir}/certs/testnode.jks").setReadable(true, false)
  }
}

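// Register a docker distribution for every flavor; failIfUnavailable is disabled so the build
// can proceed on machines without Docker.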
elasticsearch_distributions {
  Flavor.values().each { distroFlavor ->
    "docker_$distroFlavor" {
      flavor = distroFlavor
      type = 'docker'
      version = VersionProperties.getElasticsearch()
      failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
    }
  }
}

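// Before docker-compose starts, make sure the docker distributions are built, the keystore is
// staged, and the bind-mounted repo and log directories exist, are empty and world-writable.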
preProcessFixture {
  dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss
  dependsOn copyKeystore
  doLast {
    // tests expect to have an empty repo
    project.delete(
      "${buildDir}/repo",
      "${buildDir}/oss-repo"
    )
    createAndSetWritable(
      "${buildDir}/repo",
      "${buildDir}/oss-repo",
      "${buildDir}/logs/default-1",
      "${buildDir}/logs/default-2",
      "${buildDir}/logs/oss-1",
      "${buildDir}/logs/oss-2"
    )
  }
}

processTestResources {
  from project(':x-pack:plugin:core')
    .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
}

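// Docker integration tests are run serially and are never cached.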
task integTest(type: Test) {
  outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
  maxParallelForks = '1'
  include '**/*IT.class'
}

check.dependsOn integTest

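// Registers a LoggedExec task per flavor that runs `docker build` over the copied context and
// applies the release and :test tags. The image itself lives in the Docker daemon, so an empty
// marker file is declared as the task output for up-to-date checking.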
void addBuildDockerImage(final boolean oss) {
  final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: LoggedExec) {
    ext.requiresDocker = true // mark this task as requiring docker to execute
    inputs.files(tasks.named(taskName("copy", oss, "DockerContext")))
    List<String> tags
    if (oss) {
      tags = [
        "docker.elastic.co/elasticsearch/elasticsearch-oss:${VersionProperties.elasticsearch}",
        "elasticsearch-oss:test"
      ]
    } else {
      tags = [
        "elasticsearch:${VersionProperties.elasticsearch}",
        "docker.elastic.co/elasticsearch/elasticsearch:${VersionProperties.elasticsearch}",
        "docker.elastic.co/elasticsearch/elasticsearch-full:${VersionProperties.elasticsearch}",
        "elasticsearch:test",
      ]
    }
    executable 'docker'
    final List<String> dockerArgs = ['build', buildPath(oss), '--pull', '--no-cache']
    for (final String tag : tags) {
      dockerArgs.add('--tag')
      dockerArgs.add(tag)
    }
    args dockerArgs.toArray()
    File markerFile = file("build/markers/${it.name}.marker")
    outputs.file(markerFile)
    doLast {
      markerFile.setText('', 'UTF-8')
    }
  }
  assemble.dependsOn(buildDockerImageTask)
}

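// Create the copy-context and build-image tasks for both the default (oss = false) and OSS (oss = true) flavors.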
for (final boolean oss : [false, true]) {
  addCopyDockerContextTask(oss)
  addBuildDockerImage(oss)
}

// We build the images used in compose locally, but the pull command insists on using a repository
// thus we must disable it to prevent it from doing so.
// Everything will still be pulled since we will build the local images on a pull
if (tasks.findByName("composePull")) {
  tasks.composePull.enabled = false
}

/*
 * The export subprojects write out the generated Docker images to disk, so
 * that they can be easily reloaded, for example into a VM.
 */
subprojects { Project subProject ->
  if (subProject.name.contains('docker-export')) {
    apply plugin: 'distribution'

    final boolean oss = subProject.name.contains('oss-')

    def exportTaskName = taskName("export", oss, "DockerImage")
    def buildTaskName = taskName("build", oss, "DockerImage")
    def tarFile = "${parent.projectDir}/build/elasticsearch${oss ? '-oss' : ''}_test.${VersionProperties.elasticsearch}.docker.tar"

    final Task exportDockerImageTask = task(exportTaskName, type: LoggedExec) {
      executable 'docker'
      outputs.file(tarFile)
      args "save",
        "-o",
        tarFile,
        "elasticsearch${oss ? '-oss' : ''}:test"
    }

    exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName))

    artifacts.add('default', file(tarFile)) {
      type 'tar'
      name "elasticsearch${oss ? '-oss' : ''}"
      builtBy exportTaskName
    }

    assemble.dependsOn exportTaskName
  }
}