Remove any non oss from build, package, and distribution (#102)
This commit changes the building, packaging, and testing framework to support only OSS across the different distributions.

Next steps:
- Completely remove -oss flag dependencies in package and build tests
- Move 6.x bwc testing to be an explicit option
- Remove any references to the elastic.co download site (or replace them with downloads from the OSS website)

Co-authored-by: Himanshu Setia <setiah@amazon.com>
Co-authored-by: Rabi Panda <pandarab@amazon.com>
Co-authored-by: Himanshu Setia <58999915+setiah@users.noreply.github.com>
Co-authored-by: Sarat Vemulapalli <vemsarat@amazon.com>
Signed-off-by: Peter Nied <petern@amazon.com>
Parent: 5544bc6caa
Commit: 83e87f7e54
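Most hunks below repeat one naming rule: every produced or downloaded artifact carries the -oss infix. A minimal standalone sketch of that convention, for orientation only (the helper is illustrative, not code from this commit):

    // e.g. "elasticsearch-oss-7.10.0-linux-x86_64.tar.gz"
    //      "elasticsearch-oss-7.10.0-windows-x86_64.zip"
    static String ossArchiveFileName(String version, String platform) {
        String extension = platform.equals("windows") ? "zip" : "tar.gz";
        return "elasticsearch-oss-" + version + "-" + platform + "-x86_64." + extension;
    }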
@@ -171,12 +171,8 @@ if (project != rootProject) {
   dependencies {
     reaper project('reaper')
-    distribution project(':distribution:archives:windows-zip')
     distribution project(':distribution:archives:oss-windows-zip')
-    distribution project(':distribution:archives:darwin-tar')
     distribution project(':distribution:archives:oss-darwin-tar')
-    distribution project(':distribution:archives:linux-aarch64-tar')
-    distribution project(':distribution:archives:linux-tar')
     distribution project(':distribution:archives:oss-linux-tar')
     distribution project(':distribution:archives:oss-linux-aarch64-tar')
 
@@ -108,7 +108,7 @@ class DistributionDownloadPluginFuncTest extends AbstractGradleFuncTest {
         then:
         result.tasks.size() == 3
-        result.output.count("Unpacking elasticsearch-${version}-linux-x86_64.tar.gz " +
+        result.output.count("Unpacking elasticsearch-oss-${version}-linux-x86_64.tar.gz " +
             "using SymbolicLinkPreservingUntarTransform.") == 1
     }

@@ -155,4 +155,4 @@ class DistributionDownloadPluginFuncTest extends AbstractGradleFuncTest {
             }
         """
         }
     }
 }
@@ -112,6 +112,7 @@ abstract class AbstractGradleFuncTest extends Specification {
     }
 
     void setupLocalGitRepo() {
+        //TODO: cleanup
         execute("git init")
         execute('git config user.email "build-tool@elastic.co"')
         execute('git config user.name "Build tool"')
@@ -54,7 +54,7 @@ class DistributionDownloadFixture {
     private static String urlPath(String version,ElasticsearchDistribution.Platform platform) {
         String fileType = ((platform == ElasticsearchDistribution.Platform.LINUX ||
             platform == ElasticsearchDistribution.Platform.DARWIN)) ? "tar.gz" : "zip"
-        "/downloads/elasticsearch/elasticsearch-${version}-${platform}-x86_64.$fileType"
+        "/downloads/elasticsearch/elasticsearch-oss-${version}-${platform}-x86_64.$fileType"
     }
 
     private static byte[] filebytes(String urlPath) throws IOException {
@@ -33,7 +33,7 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncTest {
     def setup() {
         buildFile << """
             import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar
 
             plugins {
                 id 'elasticsearch.internal-distribution-archive-setup'
             }
 
@@ -60,7 +60,6 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncTest {
 
         where:
         buildTaskName       | expectedOutputArchivePath
-        "buildDarwinTar"    | "darwin-tar/build/distributions/elasticsearch.tar.gz"
         "buildOssDarwinTar" | "oss-darwin-tar/build/distributions/elasticsearch-oss.tar.gz"
     }
 
@@ -82,7 +81,6 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncTest {
 
         where:
         buildTaskName       | expectedOutputArchivePath
-        "buildDarwinZip"    | "darwin-zip/build/distributions/elasticsearch.zip"
        "buildOssDarwinZip" | "oss-darwin-zip/build/distributions/elasticsearch-oss.zip"
     }
 
@@ -111,23 +109,23 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncTest {
                 }
             }
         }
 
         project('consumer') { p ->
             configurations {
                 consumeArchive {}
                 consumeDir {}
             }
 
             dependencies {
                 consumeDir project(path: ':producer-tar', configuration:'extracted')
                 consumeArchive project(path: ':producer-tar', configuration:'default' )
             }
 
             tasks.register("copyDir", Copy) {
                 from(configurations.consumeDir)
                 into('build/dir')
             }
 
             tasks.register("copyArchive", Copy) {
                 from(configurations.consumeArchive)
                 into('build/archives')

@@ -140,8 +138,8 @@ class InternalDistributionArchiveSetupPluginFuncTest extends AbstractGradleFuncTest {
         then: "tar task executed and target folder contains plain tar"
         result.task(':buildProducerTar').outcome == TaskOutcome.SUCCESS
         result.task(':consumer:copyArchive').outcome == TaskOutcome.SUCCESS
-        file("producer-tar/build/distributions/elasticsearch.tar.gz").exists()
-        file("consumer/build/archives/elasticsearch.tar.gz").exists()
+        file("producer-tar/build/distributions/elasticsearch-oss.tar.gz").exists()
+        file("consumer/build/archives/elasticsearch-oss.tar.gz").exists()
 
         when:
         result = gradleRunner("copyDir", "-Pversion=1.0").build()
@@ -46,32 +46,29 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGradleFuncTest
     def "builds distribution from branches via archives assemble"() {
         when:
         def result = gradleRunner(new File(testProjectDir.root, "remote"),
-            ":distribution:bwc:bugfix:buildBwcDarwinTar",
+            ":distribution:bwc:bugfix:buildBwcOssDarwinTar",
             "-DtestRemoteRepo=" + remoteGitRepo,
             "-Dbwc.remote=origin")
             .build()
         then:
-        result.task(":distribution:bwc:bugfix:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS
+        result.task(":distribution:bwc:bugfix:buildBwcOssDarwinTar").outcome == TaskOutcome.SUCCESS
 
         and: "assemble task triggered"
-        result.output.contains("[8.0.1] > Task :distribution:archives:darwin-tar:assemble")
+        result.output.contains("[8.0.1] > Task :distribution:archives:oss-darwin-tar:assemble")
     }
 
     def "bwc distribution archives can be resolved as bwc project artifact"() {
         setup:
         new File(testProjectDir.root, 'remote/build.gradle') << """
 
             configurations {
                 dists
             }
 
             dependencies {
-                dists project(path: ":distribution:bwc:bugfix", configuration:"darwin-tar")
+                dists project(path: ":distribution:bwc:bugfix", configuration:"oss-darwin-tar")
             }
 
             tasks.register("resolveDistributionArchive") {
                 inputs.files(configurations.dists)
                 doLast {

@@ -89,27 +86,27 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGradleFuncTest
             .build()
         then:
         result.task(":resolveDistributionArchive").outcome == TaskOutcome.SUCCESS
-        result.task(":distribution:bwc:bugfix:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS
+        result.task(":distribution:bwc:bugfix:buildBwcOssDarwinTar").outcome == TaskOutcome.SUCCESS
 
         and: "assemble task triggered"
-        result.output.contains("[8.0.1] > Task :distribution:archives:darwin-tar:assemble")
+        result.output.contains("[8.0.1] > Task :distribution:archives:oss-darwin-tar:assemble")
         normalizedOutput(result.output)
-            .contains("distfile /distribution/bwc/bugfix/build/bwc/checkout-8.0/distribution/archives/darwin-tar/" +
-                "build/distributions/elasticsearch-8.0.1-SNAPSHOT-darwin-x86_64.tar.gz")
+            .contains("distfile /distribution/bwc/bugfix/build/bwc/checkout-8.0/distribution/archives/oss-darwin-tar/" +
+                "build/distributions/elasticsearch-oss-8.0.1-SNAPSHOT-darwin-x86_64.tar.gz")
     }
 
     def "bwc expanded distribution folder can be resolved as bwc project artifact"() {
         setup:
         new File(testProjectDir.root, 'remote/build.gradle') << """
 
             configurations {
                 expandedDist
             }
 
             dependencies {
-                expandedDist project(path: ":distribution:bwc:bugfix", configuration:"expanded-darwin-tar")
+                expandedDist project(path: ":distribution:bwc:bugfix", configuration:"expanded-oss-darwin-tar")
             }
 
             tasks.register("resolveExpandedDistribution") {
                 inputs.files(configurations.expandedDist)
                 doLast {

@@ -127,13 +124,13 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGradleFuncTest
             .build()
         then:
         result.task(":resolveExpandedDistribution").outcome == TaskOutcome.SUCCESS
-        result.task(":distribution:bwc:bugfix:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS
+        result.task(":distribution:bwc:bugfix:buildBwcOssDarwinTar").outcome == TaskOutcome.SUCCESS
 
         and: "assemble task triggered"
-        result.output.contains("[8.0.1] > Task :distribution:archives:darwin-tar:assemble")
+        result.output.contains("[8.0.1] > Task :distribution:archives:oss-darwin-tar:assemble")
         normalizedOutput(result.output)
             .contains("distfile /distribution/bwc/bugfix/build/bwc/checkout-8.0/" +
-                "distribution/archives/darwin-tar/build/install")
+                "distribution/archives/oss-darwin-tar/build/install")
     }
 
     File setupGitRemote() {
@@ -72,7 +72,7 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest
         def result = gradleRunner("setupDistro", '-g', testProjectDir.newFolder('GUH').path).build()
 
         then:
-        result.task(":distribution:archives:linux-tar:buildExpanded").outcome == TaskOutcome.SUCCESS
+        result.task(":distribution:archives:oss-linux-tar:buildExpanded").outcome == TaskOutcome.SUCCESS
         result.task(":setupDistro").outcome == TaskOutcome.SUCCESS
         assertExtractedDistroIsCreated("build/distro", 'current-marker.txt')
     }

@@ -144,24 +144,24 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest
             apply plugin:'base'
 
             // packed distro
-            configurations.create("linux-tar")
+            configurations.create("oss-linux-tar")
             tasks.register("buildBwcTask", Tar) {
                 from('bwc-marker.txt')
                 archiveExtension = "tar.gz"
                 compression = Compression.GZIP
             }
             artifacts {
-                it.add("linux-tar", buildBwcTask)
+                it.add("oss-linux-tar", buildBwcTask)
             }
 
             // expanded distro
-            configurations.create("expanded-linux-tar")
+            configurations.create("expanded-oss-linux-tar")
             def expandedTask = tasks.register("buildBwcExpandedTask", Copy) {
                 from('bwc-marker.txt')
                 into('build/install/elastic-distro')
             }
             artifacts {
-                it.add("expanded-linux-tar", file('build/install')) {
+                it.add("expanded-oss-linux-tar", file('build/install')) {
                     builtBy expandedTask
                     type = 'directory'
                 }

@@ -171,9 +171,9 @@ class InternalDistributionDownloadPluginFuncTest extends AbstractGradleFuncTest
 
     private void localDistroSetup() {
         settingsFile << """
-            include ":distribution:archives:linux-tar"
+            include ":distribution:archives:oss-linux-tar"
         """
-        def bwcSubProjectFolder = testProjectDir.newFolder("distribution", "archives", "linux-tar")
+        def bwcSubProjectFolder = testProjectDir.newFolder("distribution", "archives", "oss-linux-tar")
         new File(bwcSubProjectFolder, 'current-marker.txt') << "current"
         new File(bwcSubProjectFolder, 'build.gradle') << """
             import org.gradle.api.internal.artifacts.ArtifactAttributes;
@@ -23,7 +23,7 @@ subprojects {
     tasks.register('tar', Tar) {
         from('.')
         destinationDirectory.set(file('build/distributions'))
-        archiveBaseName.set("elasticsearch${project.name.startsWith('oss')?'-oss':''}")
+        archiveBaseName.set("elasticsearch-oss")
         archiveVersion.set("8.0.1-SNAPSHOT")
         archiveClassifier.set("darwin-x86_64")
         archiveExtension.set('tar.gz')

@@ -19,5 +19,4 @@
 
 include ":distribution:bwc:bugfix"
 include ":distribution:bwc:minor"
-include ":distribution:archives:darwin-tar"
 include ":distribution:archives:oss-darwin-tar"
@@ -23,8 +23,6 @@ import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
 import org.gradle.api.Plugin
 import org.gradle.api.Project
-import org.gradle.api.Task
-import org.gradle.api.tasks.TaskProvider
 
 /**
  * Sets up tests for documentation.

@@ -37,9 +35,6 @@ class DocsTestPlugin implements Plugin<Project> {
         project.pluginManager.apply('elasticsearch.standalone-rest-test')
         project.pluginManager.apply('elasticsearch.rest-test')
 
-        String distribution = System.getProperty('tests.distribution', 'default')
-        // The distribution can be configured with -Dtests.distribution on the command line
-        project.testClusters.integTest.testDistribution = distribution.toUpperCase()
         project.testClusters.integTest.nameCustomization = { it.replace("integTest", "node") }
         // Docs are published separately so no need to assemble
         project.tasks.assemble.enabled = false

@@ -51,7 +46,6 @@ class DocsTestPlugin implements Plugin<Project> {
             '\\{version\\}': Version.fromString(VersionProperties.elasticsearch).toString(),
             '\\{version_qualified\\}': VersionProperties.elasticsearch,
             '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
-            '\\{build_flavor\\}' : distribution,
             '\\{build_type\\}' : OS.conditionalString().onWindows({"zip"}).onUnix({"tar"}).supply(),
         ]
         project.tasks.register('listSnippets', SnippetsTask) {
@@ -193,35 +193,20 @@ class ClusterFormationTasks {
             }
             return
         }
-        // TEMP HACK
-        // The oss docs CI build overrides the distro on the command line. This hack handles backcompat until CI is updated.
-        if (distro.equals('oss-zip')) {
-            distro = 'oss'
-        }
-        if (distro.equals('zip')) {
-            distro = 'default'
-        }
-        // END TEMP HACK
-        if (['oss', 'default'].contains(distro) == false) {
-            throw new GradleException("Unknown distribution: ${distro} in project ${project.path}")
-        }
+        distro = 'oss'
 
         Version version = Version.fromString(elasticsearchVersion)
         String os = getOs()
         String classifier = "-${os}-x86_64"
         String packaging = os.equals('windows') ? 'zip' : 'tar.gz'
-        String artifactName = 'elasticsearch'
-        if (distro.equals('oss') && Version.fromString(elasticsearchVersion).onOrAfter('6.3.0')) {
-            artifactName += '-oss'
-        }
+        String artifactName = 'elasticsearch-oss'
         Object dependency
         String snapshotProject = "${os}-${os.equals('windows') ? 'zip' : 'tar'}"
         if (version.before("7.0.0")) {
             snapshotProject = "zip"
             packaging = "zip"
         }
-        if (distro.equals("oss")) {
-            snapshotProject = "oss-" + snapshotProject
-        }
+        snapshotProject = "oss-" + snapshotProject
 
         BwcVersions.UnreleasedVersionInfo unreleasedInfo = null
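The hunk above collapses the old flavor negotiation into constants: the distro is forced to 'oss', the artifact is always elasticsearch-oss, and the snapshot project always gets the oss- prefix. A minimal sketch of the surviving selection rule, in plain Java with hypothetical helper names (the real code is Groovy and takes a Version object rather than a boolean):

    // Which artifact name, snapshot project, and packaging a test cluster now resolves to.
    static String[] ossArtifactSelection(String os, boolean beforeVersion7) {
        String packaging = os.equals("windows") ? "zip" : "tar.gz";
        String snapshotProject = os + "-" + (os.equals("windows") ? "zip" : "tar");
        if (beforeVersion7) { // pre-7.0 published a single zip
            snapshotProject = "zip";
            packaging = "zip";
        }
        // the non-OSS ('default') branch no longer exists
        return new String[] { "elasticsearch-oss", "oss-" + snapshotProject, packaging };
    }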
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.gradle;
 
-import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
 import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
 import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
 import org.elasticsearch.gradle.docker.DockerSupportPlugin;

@@ -38,7 +37,6 @@ import org.gradle.api.internal.artifacts.ArtifactAttributes;
 import org.gradle.api.provider.Provider;
 
 import java.util.Comparator;
-import static org.elasticsearch.gradle.util.Util.capitalize;
 
 /**
  * A plugin to manage getting and extracting distributions of Elasticsearch.

@@ -193,43 +191,8 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
         } else if (distribution.getType() == Type.RPM && distroVersion.before("7.0.0")) {
             classifier = "";
         }
-        String flavor = "";
-        if (distribution.getFlavor() == Flavor.OSS && distroVersion.onOrAfter("6.3.0")) {
-            flavor = "-oss";
-        }
 
         String group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP;
-        return group + ":elasticsearch" + flavor + ":" + distribution.getVersion() + classifier + "@" + extension;
-    }
-
-    private static String configName(String prefix, ElasticsearchDistribution distribution) {
-        return String.format(
-            "%s_%s_%s_%s%s%s",
-            prefix,
-            distribution.getVersion(),
-            distribution.getType(),
-            distribution.getPlatform() == null ? "" : distribution.getPlatform() + "_",
-            distribution.getFlavor(),
-            distribution.getBundledJdk() ? "" : "_nojdk"
-        );
-    }
-
-    private static String extractTaskName(ElasticsearchDistribution distribution) {
-        String taskName = "extractElasticsearch";
-        if (distribution.getType() != Type.INTEG_TEST_ZIP) {
-            if (distribution.getFlavor() == Flavor.OSS) {
-                taskName += "Oss";
-            }
-            if (distribution.getBundledJdk() == false) {
-                taskName += "NoJdk";
-            }
-        }
-        if (distribution.getType() == Type.ARCHIVE) {
-            taskName += capitalize(distribution.getPlatform().toString());
-        } else if (distribution.getType() != Type.INTEG_TEST_ZIP) {
-            taskName += capitalize(distribution.getType().toString());
-        }
-        taskName += distribution.getVersion();
-        return taskName;
+        return group + ":elasticsearch-oss" + ":" + distribution.getVersion() + classifier + "@" + extension;
     }
 }
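After this hunk the Ivy dependency notation no longer branches on flavor; only the classifier and extension vary. An illustrative restatement of the return statement above (the group string here is a placeholder argument, not the plugin's real FAKE_*_IVY_GROUP constants):

    // e.g. dependencyNotation("es-downloads", "7.10.0", "-linux-x86_64", "tar.gz")
    //   -> "es-downloads:elasticsearch-oss:7.10.0-linux-x86_64@tar.gz"
    static String dependencyNotation(String group, String version, String classifier, String extension) {
        return group + ":elasticsearch-oss:" + version + classifier + "@" + extension;
    }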
@@ -23,9 +23,7 @@ package org.elasticsearch.gradle;
  * This class models the different Docker base images that are used to build Docker distributions of Elasticsearch.
  */
 public enum DockerBase {
-    CENTOS("centos:8"),
-    // "latest" here is intentional, since the image name specifies "8"
-    UBI("docker.elastic.co/ubi8/ubi-minimal:latest");
+    CENTOS("centos:8");
 
     private final String image;
 

@@ -50,9 +50,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
         ARCHIVE,
         RPM,
         DEB,
-        DOCKER,
-        // This is a different flavour of Docker image
-        DOCKER_UBI;
+        DOCKER;
 
         @Override
         public String toString() {

@@ -63,7 +61,6 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
             switch (this) {
                 case DEB:
                 case DOCKER:
-                case DOCKER_UBI:
                 case RPM:
                     return false;
 

@@ -73,16 +70,6 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
             }
         }
 
-    public enum Flavor {
-        DEFAULT,
-        OSS;
-
-        @Override
-        public String toString() {
-            return super.toString().toLowerCase(Locale.ROOT);
-        }
-    }
-
     // package private to tests can use
     public static final Platform CURRENT_PLATFORM = OS.<Platform>conditional()
         .onLinux(() -> Platform.LINUX)
@@ -99,7 +86,6 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
     private final Property<String> version;
     private final Property<Type> type;
     private final Property<Platform> platform;
-    private final Property<Flavor> flavor;
     private final Property<Boolean> bundledJdk;
     private final Property<Boolean> failIfUnavailable;
     private final Configuration extracted;

@@ -119,7 +105,6 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
         this.type = objectFactory.property(Type.class);
         this.type.convention(Type.ARCHIVE);
         this.platform = objectFactory.property(Platform.class);
-        this.flavor = objectFactory.property(Flavor.class);
         this.bundledJdk = objectFactory.property(Boolean.class);
         this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true);
         this.extracted = extractedConfiguration;

@@ -154,21 +139,13 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
         this.type.set(type);
     }
 
-    public Flavor getFlavor() {
-        return flavor.getOrNull();
-    }
-
-    public void setFlavor(Flavor flavor) {
-        this.flavor.set(flavor);
-    }
-
     public boolean getBundledJdk() {
         return bundledJdk.getOrElse(true);
     }
 
     public boolean isDocker() {
         final Type type = this.type.get();
-        return type == Type.DOCKER || type == Type.DOCKER_UBI;
+        return type == Type.DOCKER;
     }
 
     public void setBundledJdk(Boolean bundledJdk) {
@@ -204,7 +181,6 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
             switch (getType()) {
                 case DEB:
                 case DOCKER:
-                case DOCKER_UBI:
                 case RPM:
                     throw new UnsupportedOperationException(
                         "distribution type [" + getType() + "] for " + "elasticsearch distribution [" + name + "] cannot be extracted"

@@ -239,11 +215,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
                     "platform cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
                 );
             }
-            if (flavor.getOrNull() != null) {
-                throw new IllegalArgumentException(
-                    "flavor [" + flavor.get() + "] not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
-                );
-            }
 
             if (bundledJdk.getOrNull() != null) {
                 throw new IllegalArgumentException(
                     "bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"

@@ -275,15 +247,9 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
                     "bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type " + "[docker]"
                 );
             }
-            if (flavor.get() == Flavor.OSS && type.get() == Type.DOCKER_UBI) {
-                throw new IllegalArgumentException("Cannot build a UBI docker image for the OSS distribution");
-            }
         }
         }
 
-        if (flavor.isPresent() == false) {
-            flavor.set(Flavor.DEFAULT);
-        }
         if (bundledJdk.isPresent() == false) {
             bundledJdk.set(true);
         }

@@ -291,7 +257,6 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
         version.finalizeValue();
         platform.finalizeValue();
         type.finalizeValue();
-        flavor.finalizeValue();
         bundledJdk.finalizeValue();
     }
 }
@@ -79,6 +79,7 @@ public class RepositoriesSetupPlugin implements Plugin<Project> {
             throw new GradleException("Malformed lucene snapshot version: " + luceneVersion);
         }
         String revision = matcher.group(1);
+        // TODO(cleanup) - Setup own lucene snapshot repo
         MavenArtifactRepository luceneRepo = repos.maven(repo -> {
             repo.setName("lucene-snapshots");
             repo.setUrl("https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + revision);

@@ -108,7 +108,7 @@ public class InternalDistributionArchiveSetupPlugin implements Plugin<Project> {
         project.getTasks().withType(AbstractArchiveTask.class).configureEach(t -> {
             String subdir = archiveTaskToSubprojectName(t.getName());
             t.getDestinationDirectory().set(project.file(subdir + "/build/distributions"));
-            t.getArchiveBaseName().set(subdir.contains("oss") ? "elasticsearch-oss" : "elasticsearch");
+            t.getArchiveBaseName().set("elasticsearch-oss");
         });
     }
 

@@ -119,7 +119,7 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
 
     private void registerDistributionArchiveArtifact(Project bwcProject, DistributionProject distributionProject, String buildBwcTask) {
         String artifactFileName = distributionProject.getDistFile().getName();
-        String artifactName = artifactFileName.contains("oss") ? "elasticsearch-oss" : "elasticsearch";
+        String artifactName = "elasticsearch-oss";
 
         String suffix = artifactFileName.endsWith("tar.gz") ? "tar.gz" : artifactFileName.substring(artifactFileName.length() - 3);
         int archIndex = artifactFileName.indexOf("x86_64");
@@ -142,12 +142,12 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
     private static List<DistributionProject> resolveArchiveProjects(File checkoutDir, Version bwcVersion) {
         List<String> projects = new ArrayList<>();
         // All active BWC branches publish default and oss variants of rpm and deb packages
-        projects.addAll(asList("deb", "rpm", "oss-deb", "oss-rpm"));
+        projects.addAll(asList("oss-deb", "oss-rpm"));
 
         if (bwcVersion.onOrAfter("7.0.0")) { // starting with 7.0 we bundle a jdk which means we have platform-specific archives
-            projects.addAll(asList("oss-windows-zip", "windows-zip", "oss-darwin-tar", "darwin-tar", "oss-linux-tar", "linux-tar"));
+            projects.addAll(asList("oss-windows-zip", "oss-darwin-tar", "oss-linux-tar"));
         } else { // prior to 7.0 we published only a single zip and tar archives for oss and default distributions
-            projects.addAll(asList("oss-zip", "zip", "tar", "oss-tar"));
+            projects.addAll(asList("oss-zip", "oss-tar"));
         }
 
         return projects.stream().map(name -> {
@@ -157,7 +157,7 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
             if (bwcVersion.onOrAfter("7.0.0")) {
                 if (name.contains("zip") || name.contains("tar")) {
                     int index = name.lastIndexOf('-');
-                    String baseName = name.startsWith("oss-") ? name.substring(4, index) : name.substring(0, index);
+                    String baseName = name.substring(4, index); // oss-
                     classifier = "-" + baseName + "-x86_64";
                     extension = name.substring(index + 1);
                     if (extension.equals("tar")) {

@@ -168,7 +168,7 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
                 } else if (name.contains("rpm")) {
                     classifier = "-x86_64";
                 }
-            } else if (name.contains("oss-")) {
+            } else {
                 extension = name.substring(4);
             }
             return new DistributionProject(name, baseDir, bwcVersion, classifier, extension, checkoutDir);

@@ -228,16 +228,7 @@ public class InternalDistributionBwcSetupPlugin implements Plugin<Project> {
             this.projectPath = baseDir + "/" + name;
             this.distFile = new File(
                 checkoutDir,
-                baseDir
-                    + "/"
-                    + name
-                    + "/build/distributions/elasticsearch-"
-                    + (name.startsWith("oss") ? "oss-" : "")
-                    + version
-                    + "-SNAPSHOT"
-                    + classifier
-                    + "."
-                    + extension
+                baseDir + "/" + name + "/build/distributions/elasticsearch-oss-" + version + "-SNAPSHOT" + classifier + "." + extension
             );
             // we only ported this down to the 7.x branch.
             if (version.onOrAfter("7.10.0") && (name.endsWith("zip") || name.endsWith("tar"))) {
@@ -127,7 +127,6 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
                 break;
 
             case DOCKER:
-            case DOCKER_UBI:
                 projectPath += ":docker:";
                 projectPath += distributionProjectName(distribution);
                 break;

@@ -155,9 +154,7 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
             ? ""
             : "-" + architecture.toString().toLowerCase();
 
-        if (distribution.getFlavor() == ElasticsearchDistribution.Flavor.OSS) {
-            projectName += "oss-";
-        }
+        projectName += "oss-";
 
         if (distribution.getBundledJdk() == false) {
             projectName += "no-jdk-";

@@ -169,7 +166,7 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
                     ? "-zip"
                     : "-tar");
             } else {
-                projectName = distribution.getFlavor().equals(ElasticsearchDistribution.Flavor.DEFAULT) ? "zip" : "oss-zip";
+                projectName = "oss-zip";
             }
             break;
 

@@ -177,10 +174,6 @@ public class InternalDistributionDownloadPlugin implements Plugin<Project> {
                 projectName += "docker" + archString + "-export";
                 break;
 
-            case DOCKER_UBI:
-                projectName += "ubi-docker" + archString + "-export";
-                break;
-
             default:
                 projectName += distribution.getType();
                 break;
@@ -22,7 +22,6 @@ package org.elasticsearch.gradle.test;
 import org.elasticsearch.gradle.Architecture;
 import org.elasticsearch.gradle.DistributionDownloadPlugin;
 import org.elasticsearch.gradle.ElasticsearchDistribution;
-import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
 import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
 import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
 import org.elasticsearch.gradle.Jdk;

@@ -110,24 +109,28 @@ public class DistroTestPlugin implements Plugin<Project> {
             TaskProvider<?> depsTask = project.getTasks().register(taskname + "#deps");
             depsTask.configure(t -> t.dependsOn(distribution, examplePlugin));
             depsTasks.put(taskname, depsTask);
-            TaskProvider<Test> destructiveTask = configureTestTask(project, taskname, distribution, t -> {
-                t.onlyIf(t2 -> distribution.isDocker() == false || dockerSupport.get().getDockerAvailability().isAvailable);
-                addSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath);
-                addSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
-                t.exclude("**/PackageUpgradeTests.class");
-            }, depsTask);
-
-            if (distribution.getPlatform() == Platform.WINDOWS) {
-                windowsTestTasks.add(destructiveTask);
-            } else {
-                linuxTestTasks.computeIfAbsent(distribution.getType(), k -> new ArrayList<>()).add(destructiveTask);
-            }
+            // TODO - suppressing failure temporarily where duplicate tasks are created for docker.
+            try {
+                TaskProvider<Test> destructiveTask = configureTestTask(project, taskname, distribution, t -> {
+                    t.onlyIf(t2 -> distribution.isDocker() == false || dockerSupport.get().getDockerAvailability().isAvailable);
+                    addSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath);
+                    addSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
+                    t.exclude("**/PackageUpgradeTests.class");
+                }, depsTask);
+                if (distribution.getPlatform() == Platform.WINDOWS) {
+                    windowsTestTasks.add(destructiveTask);
+                } else {
+                    linuxTestTasks.computeIfAbsent(distribution.getType(), k -> new ArrayList<>()).add(destructiveTask);
+                }
+                destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask));
+                lifecycleTasks.get(distribution.getType()).configure(t -> t.dependsOn(destructiveTask));
+            } catch (Exception ex) {
+                System.out.println(ex.getMessage());
+            }
-            destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask));
-            lifecycleTasks.get(distribution.getType()).configure(t -> t.dependsOn(destructiveTask));
 
             if ((distribution.getType() == Type.DEB || distribution.getType() == Type.RPM) && distribution.getBundledJdk()) {
                 for (Version version : BuildParams.getBwcVersions().getIndexCompatible()) {
-                    if (distribution.getFlavor() == Flavor.OSS && version.before("6.3.0")) {
+                    if (version.before("6.3.0")) {
                         continue; // before opening xpack
                     }
                     final ElasticsearchDistribution bwcDistro;
@@ -140,7 +143,6 @@ public class DistroTestPlugin implements Plugin<Project> {
                         distribution.getArchitecture(),
                         distribution.getType(),
                         distribution.getPlatform(),
-                        distribution.getFlavor(),
                         distribution.getBundledJdk(),
                         version.toString()
                     );

@@ -206,8 +208,7 @@ public class DistroTestPlugin implements Plugin<Project> {
             // auto-detection doesn't work.
             //
             // The shouldTestDocker property could be null, hence we use Boolean.TRUE.equals()
-            boolean shouldExecute = (type != Type.DOCKER && type != Type.DOCKER_UBI)
-                || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
+            boolean shouldExecute = (type != Type.DOCKER) || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
 
             if (shouldExecute) {
                 distroTest.configure(t -> t.dependsOn(wrapperTask));

@@ -234,7 +235,6 @@ public class DistroTestPlugin implements Plugin<Project> {
         Map<ElasticsearchDistribution.Type, TaskProvider<?>> lifecyleTasks = new HashMap<>();
 
         lifecyleTasks.put(Type.DOCKER, project.getTasks().register(taskPrefix + ".docker"));
-        lifecyleTasks.put(Type.DOCKER_UBI, project.getTasks().register(taskPrefix + ".ubi"));
         lifecyleTasks.put(Type.ARCHIVE, project.getTasks().register(taskPrefix + ".archives"));
         lifecyleTasks.put(Type.DEB, project.getTasks().register(taskPrefix + ".packages"));
         lifecyleTasks.put(Type.RPM, lifecyleTasks.get(Type.DEB));
@@ -363,55 +363,38 @@ public class DistroTestPlugin implements Plugin<Project> {
         List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
 
         for (Architecture architecture : Architecture.values()) {
-            for (Type type : Arrays.asList(Type.DEB, Type.RPM, Type.DOCKER, Type.DOCKER_UBI)) {
-                for (Flavor flavor : Flavor.values()) {
-                    for (boolean bundledJdk : Arrays.asList(true, false)) {
-                        if (bundledJdk == false) {
-                            // We'll never publish an ARM (aarch64) build without a bundled JDK.
-                            if (architecture == Architecture.AARCH64) {
-                                continue;
-                            }
-                            // All our Docker images include a bundled JDK so it doesn't make sense to test without one.
-                            if (type == Type.DOCKER || type == Type.DOCKER_UBI) {
-                                continue;
-                            }
-                        }
-
-                        // We don't publish the OSS distribution on UBI
-                        if (type == Type.DOCKER_UBI && flavor == Flavor.OSS) {
-                            continue;
-                        }
-
-                        currentDistros.add(
-                            createDistro(distributions, architecture, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch())
-                        );
-                    }
+            for (Type type : Arrays.asList(Type.DEB, Type.RPM, Type.DOCKER)) {
+                for (boolean bundledJdk : Arrays.asList(true, false)) {
+                    if (bundledJdk == false) {
+                        // We'll never publish an ARM (aarch64) build without a bundled JDK.
+                        if (architecture == Architecture.AARCH64) {
+                            continue;
+                        }
+                        // All our Docker images include a bundled JDK so it doesn't make sense to test without one.
+                        if (type == Type.DOCKER) {
+                            continue;
+                        }
+                    }
+
+                    currentDistros.add(
+                        createDistro(distributions, architecture, type, null, bundledJdk, VersionProperties.getElasticsearch())
+                    );
                 }
             }
         }
 
         for (Architecture architecture : Architecture.values()) {
             for (Platform platform : Arrays.asList(Platform.LINUX, Platform.WINDOWS)) {
-                for (Flavor flavor : Flavor.values()) {
-                    for (boolean bundledJdk : Arrays.asList(true, false)) {
-                        if (bundledJdk == false && architecture != Architecture.X64) {
-                            // We will never publish distributions for non-x86 (amd64) platforms
-                            // without a bundled JDK
-                            continue;
-                        }
-
-                        currentDistros.add(
-                            createDistro(
-                                distributions,
-                                architecture,
-                                Type.ARCHIVE,
-                                platform,
-                                flavor,
-                                bundledJdk,
-                                VersionProperties.getElasticsearch()
-                            )
-                        );
-                    }
+                for (boolean bundledJdk : Arrays.asList(true, false)) {
+                    if (bundledJdk == false && architecture != Architecture.X64) {
+                        // We will never publish distributions for non-x86 (amd64) platforms
+                        // without a bundled JDK
+                        continue;
+                    }
+
+                    currentDistros.add(
+                        createDistro(distributions, architecture, Type.ARCHIVE, platform, bundledJdk, VersionProperties.getElasticsearch())
+                    );
                 }
             }
         }

@@ -424,15 +407,13 @@ public class DistroTestPlugin implements Plugin<Project> {
         Architecture architecture,
         Type type,
         Platform platform,
-        Flavor flavor,
         boolean bundledJdk,
         String version
     ) {
-        String name = distroId(type, platform, flavor, bundledJdk, architecture) + "-" + version;
-        boolean isDocker = type == Type.DOCKER || type == Type.DOCKER_UBI;
+        String name = distroId(type, platform, bundledJdk, architecture) + "-" + version;
+        boolean isDocker = type == Type.DOCKER;
         ElasticsearchDistribution distro = distributions.create(name, d -> {
             d.setArchitecture(architecture);
-            d.setFlavor(flavor);
             d.setType(type);
             if (type == Type.ARCHIVE) {
                 d.setPlatform(platform);
@@ -457,19 +438,15 @@ public class DistroTestPlugin implements Plugin<Project> {
         return project.getName().contains("windows");
     }
 
-    private static String distroId(Type type, Platform platform, Flavor flavor, boolean bundledJdk, Architecture architecture) {
-        return flavor
-            + "-"
-            + (type == Type.ARCHIVE ? platform + "-" : "")
-            + type
-            + (bundledJdk ? "" : "-no-jdk")
-            + (architecture == Architecture.X64 ? "" : "-" + architecture.toString().toLowerCase());
+    private static String distroId(Type type, Platform platform, boolean bundledJdk, Architecture architecture) {
+        return (type == Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk") + (architecture == Architecture.X64
+            ? ""
+            : "-" + architecture.toString().toLowerCase());
     }
 
     private static String destructiveDistroTestTaskName(ElasticsearchDistribution distro) {
         Type type = distro.getType();
-        return "destructiveDistroTest."
-            + distroId(type, distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk(), distro.getArchitecture());
+        return "destructiveDistroTest." + distroId(type, distro.getPlatform(), distro.getBundledJdk(), distro.getArchitecture());
     }
 
     private static String destructiveDistroUpgradeTestTaskName(ElasticsearchDistribution distro, String bwcVersion) {
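With the flavor prefix gone, the distro id is built from platform, type, JDK bundling, and architecture alone. A minimal standalone sketch of the resulting naming (string parameters here are simplified placeholders for the plugin's enums):

    // e.g. "linux-archive", "rpm-no-jdk", "docker-aarch64"
    static String distroId(String type, String platform, boolean bundledJdk, String arch, boolean isArchive) {
        return (isArchive ? platform + "-" : "") + type
            + (bundledJdk ? "" : "-no-jdk")
            + (arch.equals("x64") ? "" : "-" + arch);
    }

Previously the same inputs produced ids like "oss-linux-archive"; every test task name derived from distroId loses the flavor segment in lockstep.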
@@ -477,7 +454,7 @@ public class DistroTestPlugin implements Plugin<Project> {
         return "destructiveDistroUpgradeTest.v"
             + bwcVersion
             + "."
-            + distroId(type, distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk(), distro.getArchitecture());
+            + distroId(type, distro.getPlatform(), distro.getBundledJdk(), distro.getArchitecture());
     }
 
     private static void addSysprop(Test task, String sysprop, Supplier<String> valueSupplier) {

@@ -267,16 +267,10 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         if (testDistribution == TestDistribution.INTEG_TEST) {
             distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
             // we change the underlying distribution when changing the test distribution of the cluster.
-            distribution.setFlavor(null);
             distribution.setPlatform(null);
             distribution.setBundledJdk(null);
         } else {
             distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
-            if (testDistribution == TestDistribution.DEFAULT) {
-                distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
-            } else {
-                distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
-            }
         }
     }
 
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.gradle;
 
-import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
 import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
 import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
 import org.elasticsearch.gradle.info.BuildParams;

@@ -63,15 +62,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
     );
 
     public void testVersionDefault() {
-        ElasticsearchDistribution distro = checkDistro(
-            createProject(null, false),
-            "testdistro",
-            null,
-            Type.ARCHIVE,
-            Platform.LINUX,
-            Flavor.OSS,
-            true
-        );
+        ElasticsearchDistribution distro = checkDistro(createProject(null, false), "testdistro", null, Type.ARCHIVE, Platform.LINUX, true);
         assertEquals(distro.getVersion(), VersionProperties.getElasticsearch());
     }
 

@@ -82,35 +73,18 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
             "badversion",
             Type.ARCHIVE,
             Platform.LINUX,
-            Flavor.OSS,
             true,
             "Invalid version format: 'badversion'"
         );
     }
 
     public void testTypeDefault() {
-        ElasticsearchDistribution distro = checkDistro(
-            createProject(null, false),
-            "testdistro",
-            "5.0.0",
-            null,
-            Platform.LINUX,
-            Flavor.OSS,
-            true
-        );
+        ElasticsearchDistribution distro = checkDistro(createProject(null, false), "testdistro", "5.0.0", null, Platform.LINUX, true);
         assertEquals(distro.getType(), Type.ARCHIVE);
     }
 
     public void testPlatformDefault() {
-        ElasticsearchDistribution distro = checkDistro(
-            createProject(null, false),
-            "testdistro",
-            "5.0.0",
-            Type.ARCHIVE,
-            null,
-            Flavor.OSS,
-            true
-        );
+        ElasticsearchDistribution distro = checkDistro(createProject(null, false), "testdistro", "5.0.0", Type.ARCHIVE, null, true);
         assertEquals(distro.getPlatform(), ElasticsearchDistribution.CURRENT_PLATFORM);
     }
 
@@ -122,37 +96,10 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
             Type.INTEG_TEST_ZIP,
             Platform.LINUX,
             null,
-            null,
             "platform cannot be set on elasticsearch distribution [testdistro]"
         );
     }
 
-    public void testFlavorDefault() {
-        ElasticsearchDistribution distro = checkDistro(
-            createProject(null, false),
-            "testdistro",
-            "5.0.0",
-            Type.ARCHIVE,
-            Platform.LINUX,
-            null,
-            true
-        );
-        assertEquals(distro.getFlavor(), Flavor.DEFAULT);
-    }
-
-    public void testFlavorForIntegTest() {
-        assertDistroError(
-            createProject(null, false),
-            "testdistro",
-            "5.0.0",
-            Type.INTEG_TEST_ZIP,
-            null,
-            Flavor.OSS,
-            null,
-            "flavor [oss] not allowed for elasticsearch distribution [testdistro] of type [integ_test_zip]"
-        );
-    }
-
     public void testBundledJdkDefault() {
         ElasticsearchDistribution distro = checkDistro(
             createProject(null, false),

@@ -160,7 +107,6 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
             "5.0.0",
             Type.ARCHIVE,
             Platform.LINUX,
-            null,
             true
         );
         assertTrue(distro.getBundledJdk());

@@ -173,7 +119,6 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
             "5.0.0",
             Type.INTEG_TEST_ZIP,
             null,
-            null,
             true,
             "bundledJdk cannot be set on elasticsearch distribution [testdistro]"
         );

@@ -184,70 +129,62 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         Project archiveProject = ProjectBuilder.builder().withParent(archivesProject).withName("integ-test-zip").build();
         archiveProject.getConfigurations().create("default");
         archiveProject.getArtifacts().add("default", new File("doesnotmatter"));
-        createDistro(project, "distro", VersionProperties.getElasticsearch(), Type.INTEG_TEST_ZIP, null, null, null);
+        createDistro(project, "distro", VersionProperties.getElasticsearch(), Type.INTEG_TEST_ZIP, null, null);
         checkPlugin(project);
     }
 
     public void testLocalCurrentVersionArchives() {
         for (Platform platform : Platform.values()) {
-            for (Flavor flavor : Flavor.values()) {
-                for (boolean bundledJdk : new boolean[] { true, false }) {
-                    // create a new project in each iteration, so that we know we are resolving the only additional project being created
-                    Project project = createProject(BWC_MINOR, true);
-                    String projectName = projectName(platform.toString(), flavor, bundledJdk);
-                    projectName += (platform == Platform.WINDOWS ? "-zip" : "-tar");
-                    Project archiveProject = ProjectBuilder.builder().withParent(archivesProject).withName(projectName).build();
-                    archiveProject.getConfigurations().create("default");
-                    archiveProject.getArtifacts().add("default", new File("doesnotmatter"));
-                    createDistro(project, "distro", VersionProperties.getElasticsearch(), Type.ARCHIVE, platform, flavor, bundledJdk);
-                    checkPlugin(project);
-                }
+            for (boolean bundledJdk : new boolean[] { true, false }) {
+                // create a new project in each iteration, so that we know we are resolving the only additional project being created
+                Project project = createProject(BWC_MINOR, true);
+                String projectName = projectName(platform.toString(), bundledJdk);
+                projectName += (platform == Platform.WINDOWS ? "-zip" : "-tar");
+                Project archiveProject = ProjectBuilder.builder().withParent(archivesProject).withName(projectName).build();
+                archiveProject.getConfigurations().create("default");
+                archiveProject.getArtifacts().add("default", new File("doesnotmatter"));
+                createDistro(project, "distro", VersionProperties.getElasticsearch(), Type.ARCHIVE, platform, bundledJdk);
+                checkPlugin(project);
+            }
         }
     }
 
     public void testLocalCurrentVersionPackages() {
         for (Type packageType : new Type[] { Type.RPM, Type.DEB }) {
-            for (Flavor flavor : Flavor.values()) {
-                for (boolean bundledJdk : new boolean[] { true, false }) {
-                    Project project = createProject(BWC_MINOR, true);
-                    String projectName = projectName(packageType.toString(), flavor, bundledJdk);
-                    Project packageProject = ProjectBuilder.builder().withParent(packagesProject).withName(projectName).build();
-                    packageProject.getConfigurations().create("default");
-                    packageProject.getArtifacts().add("default", new File("doesnotmatter"));
-                    createDistro(project, "distro", VersionProperties.getElasticsearch(), packageType, null, flavor, bundledJdk);
-                    checkPlugin(project);
-                }
+            for (boolean bundledJdk : new boolean[] { true, false }) {
+                Project project = createProject(BWC_MINOR, true);
+                String projectName = projectName(packageType.toString(), bundledJdk);
+                Project packageProject = ProjectBuilder.builder().withParent(packagesProject).withName(projectName).build();
+                packageProject.getConfigurations().create("default");
+                packageProject.getArtifacts().add("default", new File("doesnotmatter"));
+                createDistro(project, "distro", VersionProperties.getElasticsearch(), packageType, null, bundledJdk);
+                checkPlugin(project);
+            }
         }
     }
 
     public void testLocalBwcArchives() {
         for (Platform platform : Platform.values()) {
-            for (Flavor flavor : Flavor.values()) {
-                // note: no non bundled jdk for bwc
-                String configName = projectName(platform.toString(), flavor, true);
-                configName += (platform == Platform.WINDOWS ? "-zip" : "-tar");
+            // note: no non bundled jdk for bwc
+            String configName = projectName(platform.toString(), true);
+            configName += (platform == Platform.WINDOWS ? "-zip" : "-tar");
 
-                checkBwc("minor", configName, BWC_MINOR_VERSION, Type.ARCHIVE, platform, flavor, BWC_MINOR, true);
-                checkBwc("staged", configName, BWC_STAGED_VERSION, Type.ARCHIVE, platform, flavor, BWC_STAGED, true);
-                checkBwc("bugfix", configName, BWC_BUGFIX_VERSION, Type.ARCHIVE, platform, flavor, BWC_BUGFIX, true);
-                checkBwc("maintenance", configName, BWC_MAINTENANCE_VERSION, Type.ARCHIVE, platform, flavor, BWC_MAINTENANCE, true);
-            }
+            checkBwc("minor", configName, BWC_MINOR_VERSION, Type.ARCHIVE, platform, BWC_MINOR, true);
+            checkBwc("staged", configName, BWC_STAGED_VERSION, Type.ARCHIVE, platform, BWC_STAGED, true);
+            checkBwc("bugfix", configName, BWC_BUGFIX_VERSION, Type.ARCHIVE, platform, BWC_BUGFIX, true);
+            checkBwc("maintenance", configName, BWC_MAINTENANCE_VERSION, Type.ARCHIVE, platform, BWC_MAINTENANCE, true);
         }
     }
 
     public void testLocalBwcPackages() {
         for (Type packageType : new Type[] { Type.RPM, Type.DEB }) {
-            for (Flavor flavor : Flavor.values()) {
-                // note: no non bundled jdk for bwc
-                String configName = projectName(packageType.toString(), flavor, true);
+            // note: no non bundled jdk for bwc
+            String configName = projectName(packageType.toString(), true);
 
-                checkBwc("minor", configName, BWC_MINOR_VERSION, packageType, null, flavor, BWC_MINOR, true);
-                checkBwc("staged", configName, BWC_STAGED_VERSION, packageType, null, flavor, BWC_STAGED, true);
-                checkBwc("bugfix", configName, BWC_BUGFIX_VERSION, packageType, null, flavor, BWC_BUGFIX, true);
-                checkBwc("maintenance", configName, BWC_MAINTENANCE_VERSION, packageType, null, flavor, BWC_MAINTENANCE, true);
-            }
+            checkBwc("minor", configName, BWC_MINOR_VERSION, packageType, null, BWC_MINOR, true);
+            checkBwc("staged", configName, BWC_STAGED_VERSION, packageType, null, BWC_STAGED, true);
+            checkBwc("bugfix", configName, BWC_BUGFIX_VERSION, packageType, null, BWC_BUGFIX, true);
+            checkBwc("maintenance", configName, BWC_MAINTENANCE_VERSION, packageType, null, BWC_MAINTENANCE, true);
         }
     }
 
@@ -257,13 +194,12 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         String version,
         Type type,
         Platform platform,
-        Flavor flavor,
         Boolean bundledJdk,
         String message
     ) {
         IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
-            () -> checkDistro(project, name, version, type, platform, flavor, bundledJdk)
+            () -> checkDistro(project, name, version, type, platform, bundledJdk)
         );
         assertThat(e.getMessage(), containsString(message));
     }

@@ -274,7 +210,6 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         String version,
         Type type,
         Platform platform,
-        Flavor flavor,
         Boolean bundledJdk
     ) {
         NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);

@@ -288,9 +223,6 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         if (platform != null) {
             distro.setPlatform(platform);
         }
-        if (flavor != null) {
-            distro.setFlavor(flavor);
-        }
         if (bundledJdk != null) {
             distro.setBundledJdk(bundledJdk);
         }

@@ -304,10 +236,9 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         String version,
         Type type,
         Platform platform,
-        Flavor flavor,
         Boolean bundledJdk
     ) {
-        ElasticsearchDistribution distribution = createDistro(project, name, version, type, platform, flavor, bundledJdk);
+        ElasticsearchDistribution distribution = createDistro(project, name, version, type, platform, bundledJdk);
         distribution.finalizeValues();
         return distribution;
     }

@@ -324,7 +255,6 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         Version version,
         Type type,
         Platform platform,
-        Flavor flavor,
         BwcVersions bwcVersions,
         boolean isInternal
     ) {

@@ -332,7 +262,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         Project archiveProject = ProjectBuilder.builder().withParent(bwcProject).withName(projectName).build();
         archiveProject.getConfigurations().create(config);
         archiveProject.getArtifacts().add(config, new File("doesnotmatter"));
-        createDistro(project, "distro", version.toString(), type, platform, flavor, true);
+        createDistro(project, "distro", version.toString(), type, platform, true);
         checkPlugin(project);
     }
 

@@ -351,11 +281,8 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
         return project;
     }
 
-    private static String projectName(String base, Flavor flavor, boolean bundledJdk) {
-        String prefix = "";
-        if (flavor == Flavor.OSS) {
-            prefix += "oss-";
-        }
+    private static String projectName(String base, boolean bundledJdk) {
+        String prefix = "oss-";
         if (bundledJdk == false) {
             prefix += "no-jdk-";
         }
@@ -102,14 +102,13 @@ public class MainResponse {
         private static final ConstructingObjectParser<Version, Void> PARSER =
             new ConstructingObjectParser<>(Version.class.getName(), true,
                 args -> {
-                    return new Version((String) args[0], (String) args[1], (String) args[2], (String) args[3], (String) args[4],
-                        (Boolean) args[5], (String) args[6], (String) args[7], (String) args[8]);
+                    return new Version((String) args[0], (String) args[1], (String) args[2], (String) args[3],
+                        (Boolean) args[4], (String) args[5], (String) args[6], (String) args[7]);
                 }
             );
 
         static {
             PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("number"));
-            PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("build_flavor"));
             PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("build_type"));
             PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("build_hash"));
             PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("build_date"));
@ -118,8 +117,8 @@ public class MainResponse {
|
|||
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("minimum_wire_compatibility_version"));
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("minimum_index_compatibility_version"));
|
||||
}
|
||||
|
||||
private final String number;
|
||||
private final String buildFlavor;
|
||||
private final String buildType;
|
||||
private final String buildHash;
|
||||
private final String buildDate;
|
||||
|
@ -128,10 +127,9 @@ public class MainResponse {
|
|||
private final String minimumWireCompatibilityVersion;
|
||||
private final String minimumIndexCompatibilityVersion;
|
||||
|
||||
public Version(String number, String buildFlavor, String buildType, String buildHash, String buildDate, boolean isSnapshot,
|
||||
String luceneVersion, String minimumWireCompatibilityVersion, String minimumIndexCompatibilityVersion) {
|
||||
public Version(String number, String buildType, String buildHash, String buildDate, boolean isSnapshot,
|
||||
String luceneVersion, String minimumWireCompatibilityVersion, String minimumIndexCompatibilityVersion) {
|
||||
this.number = number;
|
||||
this.buildFlavor = buildFlavor;
|
||||
this.buildType = buildType;
|
||||
this.buildHash = buildHash;
|
||||
this.buildDate = buildDate;
|
||||
|
@ -145,10 +143,6 @@ public class MainResponse {
|
|||
return number;
|
||||
}
|
||||
|
||||
public String getBuildFlavor() {
|
||||
return buildFlavor;
|
||||
}
|
||||
|
||||
public String getBuildType() {
|
||||
return buildType;
|
||||
}
|
||||
|
@ -184,7 +178,6 @@ public class MainResponse {
|
|||
Version version = (Version) o;
|
||||
return isSnapshot == version.isSnapshot &&
|
||||
number.equals(version.number) &&
|
||||
Objects.equals(buildFlavor, version.buildFlavor) &&
|
||||
Objects.equals(buildType, version.buildType) &&
|
||||
buildHash.equals(version.buildHash) &&
|
||||
buildDate.equals(version.buildDate) &&
|
||||
|
@ -195,7 +188,7 @@ public class MainResponse {
|
|||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(number, buildFlavor, buildType, buildHash, buildDate, isSnapshot, luceneVersion,
|
||||
return Objects.hash(number, buildType, buildHash, buildDate, isSnapshot, luceneVersion,
|
||||
minimumWireCompatibilityVersion, minimumIndexCompatibilityVersion);
|
||||
}
|
||||
}
|
||||
|
|
|
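After this change the client-side Version drops the build flavor, so it is constructed with eight arguments instead of nine. A minimal sketch of the new call shape, mirroring the testInfo fixture further below (all values are placeholders):

    // illustrative: the reduced constructor, flavor argument removed
    MainResponse.Version v = new MainResponse.Version(
        "7.10.2",               // number
        "tar",                  // buildType (the former buildFlavor parameter is gone)
        "abc123",               // buildHash
        "2020-01-01T00:00:00Z", // buildDate
        false,                  // isSnapshot
        "8.7.0",                // luceneVersion
        "6.8.0",                // minimumWireCompatibilityVersion
        "6.0.0-beta1");         // minimumIndexCompatibilityVersion
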
@@ -42,7 +42,6 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
assertNotNull(info.getNodeName());
@SuppressWarnings("unchecked")
Map<String, Object> versionMap = (Map<String, Object>) infoAsMap.get("version");
assertEquals(versionMap.get("build_flavor"), info.getVersion().getBuildFlavor());
assertEquals(versionMap.get("build_type"), info.getVersion().getBuildType());
assertEquals(versionMap.get("build_hash"), info.getVersion().getBuildHash());
assertEquals(versionMap.get("build_date"), info.getVersion().getBuildDate());

@@ -165,7 +165,7 @@ public class RestHighLevelClientTests extends ESTestCase {
}

public void testInfo() throws IOException {
MainResponse testInfo = new MainResponse("nodeName", new MainResponse.Version("number", "buildFlavor", "buildType", "buildHash",
MainResponse testInfo = new MainResponse("nodeName", new MainResponse.Version("number", "buildType", "buildHash",
"buildDate", true, "luceneVersion", "minimumWireCompatibilityVersion", "minimumIndexCompatibilityVersion"),
"clusterName", "clusterUuid", "You Know, for Search");
mockResponse((ToXContentFragment) (builder, params) -> {

@@ -175,7 +175,6 @@ public class RestHighLevelClientTests extends ESTestCase {
builder.field("cluster_uuid", testInfo.getClusterUuid());
builder.startObject("version")
.field("number", testInfo.getVersion().getNumber())
.field("build_flavor", testInfo.getVersion().getBuildFlavor())
.field("build_type", testInfo.getVersion().getBuildType())
.field("build_hash", testInfo.getVersion().getBuildHash())
.field("build_date", testInfo.getVersion().getBuildDate())

@@ -41,7 +41,7 @@ public class MainResponseTests extends AbstractResponseTestCase<org.elasticsearc
final String date = new Date(randomNonNegativeLong()).toString();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_1, Version.CURRENT);
Build build = new Build(
Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(),
Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(),
version.toString()
);
return new org.elasticsearch.action.main.MainResponse(nodeName, version, clusterName, clusterUuid, build);

@@ -62,7 +62,6 @@ public class MainResponseTests extends AbstractResponseTestCase<org.elasticsearc
assertThat(serverTestInstance.getBuild().hash(), equalTo(clientInstance.getVersion().getBuildHash()));
assertThat(serverTestInstance.getVersion().toString(), equalTo(clientInstance.getVersion().getNumber()));
assertThat(serverTestInstance.getBuild().date(), equalTo(clientInstance.getVersion().getBuildDate()));
assertThat(serverTestInstance.getBuild().flavor().displayName(), equalTo(clientInstance.getVersion().getBuildFlavor()));
assertThat(serverTestInstance.getBuild().type().displayName(), equalTo(clientInstance.getVersion().getBuildType()));
assertThat(serverTestInstance.getVersion().luceneVersion.toString(), equalTo(clientInstance.getVersion().getLuceneVersion()));
assertThat(serverTestInstance.getVersion().minimumIndexCompatibilityVersion().toString(),

@@ -46,7 +46,6 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase
String nodeName = response.getNodeName();
MainResponse.Version version = response.getVersion();
String buildDate = version.getBuildDate();
String buildFlavor = version.getBuildFlavor();
String buildHash = version.getBuildHash();
String buildType = version.getBuildType();
String luceneVersion = version.getLuceneVersion();

@@ -59,7 +58,6 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase
assertNotNull(nodeName);
assertNotNull(version);
assertNotNull(buildDate);
assertNotNull(buildFlavor);
assertNotNull(buildHash);
assertNotNull(buildType);
assertNotNull(luceneVersion);

@@ -17,28 +17,25 @@
* under the License.
*/

import java.nio.file.Files
import java.nio.file.Path

apply plugin: 'elasticsearch.internal-distribution-archive-setup'

CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean oss, boolean jdk) {
CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean jdk) {
return copySpec {
into("elasticsearch-${version}") {
into('lib') {
with libFiles(oss)
with libFiles()
}
into('config') {
dirMode 0750
fileMode 0660
with configFiles(distributionType, oss, jdk)
with configFiles(distributionType, jdk)
from {
dirMode 0750
jvmOptionsDir.getParent()
}
}
into('bin') {
with binFiles(distributionType, oss, jdk)
with binFiles(distributionType, jdk)
}
if (jdk) {
into("darwin".equals(platform) ? 'jdk.app' : 'jdk') {

@@ -65,7 +62,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
rename { 'LICENSE.txt' }
}

with noticeFile(oss, jdk)
with noticeFile(jdk)
into('modules') {
with modulesFiles
}

@@ -76,105 +73,56 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
distribution_archives {
integTestZip {
content {
archiveFiles(transportModulesFiles, 'zip', null, 'x64', true, false)
}
}

windowsZip {
archiveClassifier = 'windows-x86_64'
content {
archiveFiles(modulesFiles(false, 'windows-x86_64'), 'zip', 'windows', 'x64', false, true)
archiveFiles(transportModulesFiles, 'zip', null, 'x64', false)
}
}

ossWindowsZip {
archiveClassifier = 'windows-x86_64'
content {
archiveFiles(modulesFiles(true, 'windows-x86_64'), 'zip', 'windows', 'x64', true, true)
}
}

noJdkWindowsZip {
archiveClassifier = 'no-jdk-windows-x86_64'
content {
archiveFiles(modulesFiles(false, 'windows-x86_64'), 'zip', 'windows', 'x64', false, false)
archiveFiles(modulesFiles('windows-x86_64'), 'zip', 'windows', 'x64', true)
}
}

ossNoJdkWindowsZip {
archiveClassifier = 'no-jdk-windows-x86_64'
content {
archiveFiles(modulesFiles(true, 'windows-x86_64'), 'zip', 'windows', 'x64', true, false)
}
}

darwinTar {
archiveClassifier = 'darwin-x86_64'
content {
archiveFiles(modulesFiles(false, 'darwin-x86_64'), 'tar', 'darwin', 'x64', false, true)
archiveFiles(modulesFiles('windows-x86_64'), 'zip', 'windows', 'x64', false)
}
}

ossDarwinTar {
archiveClassifier = 'darwin-x86_64'
content {
archiveFiles(modulesFiles(true, 'darwin-x86_64'), 'tar', 'darwin', 'x64', true, true)
}
}

noJdkDarwinTar {
archiveClassifier = 'no-jdk-darwin-x86_64'
content {
archiveFiles(modulesFiles(false, 'darwin-x86_64'), 'tar', 'darwin', 'x64', false, false)
archiveFiles(modulesFiles('darwin-x86_64'), 'tar', 'darwin', 'x64', true)
}
}

ossNoJdkDarwinTar {
archiveClassifier = 'no-jdk-darwin-x86_64'
content {
archiveFiles(modulesFiles(true, 'darwin-x86_64'), 'tar', 'darwin', 'x64', true, false)
}
}

linuxAarch64Tar {
archiveClassifier = 'linux-aarch64'
content {
archiveFiles(modulesFiles(false, 'linux-aarch64'), 'tar', 'linux', 'aarch64', false, true)
}
}

linuxTar {
archiveClassifier = 'linux-x86_64'
content {
archiveFiles(modulesFiles(false, 'linux-x86_64'), 'tar', 'linux', 'x64', false, true)
archiveFiles(modulesFiles('darwin-x86_64'), 'tar', 'darwin', 'x64', false)
}
}

ossLinuxAarch64Tar {
archiveClassifier = 'linux-aarch64'
content {
archiveFiles(modulesFiles(true, 'linux-aarch64'), 'tar', 'linux', 'aarch64', true, true)
archiveFiles(modulesFiles('linux-aarch64'), 'tar', 'linux', 'aarch64', true)
}
}

ossLinuxTar {
archiveClassifier = 'linux-x86_64'
content {
archiveFiles(modulesFiles(true, 'linux-x86_64'), 'tar', 'linux', 'x64', true, true)
}
}

noJdkLinuxTar {
archiveClassifier = 'no-jdk-linux-x86_64'
content {
archiveFiles(modulesFiles(false, 'linux-x86_64'), 'tar', 'linux', 'x64', false, false)
archiveFiles(modulesFiles('linux-x86_64'), 'tar', 'linux', 'x64', true)
}
}

ossNoJdkLinuxTar {
archiveClassifier = 'no-jdk-linux-x86_64'
content {
archiveFiles(modulesFiles(true, 'linux-x86_64'), 'tar', 'linux', 'x64', true, false)
archiveFiles(modulesFiles('linux-x86_64'), 'tar', 'linux', 'x64', false)
}
}
}

@@ -183,5 +131,5 @@ subprojects {
apply plugin: 'distribution'
apply plugin: 'elasticsearch.internal-distribution-archive-check'

group = "org.elasticsearch.distribution.${name.startsWith("oss-") ? "oss" : "default"}"
group = "org.elasticsearch.distribution.oss"
}

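Only the oss archive subprojects survive the cleanup above. A short sketch of the project ids that remain, assuming the Gradle path convention used in this file (illustrative only, not part of the commit):

    // illustrative: the archive subprojects left after this change
    ['oss-windows-zip', 'oss-darwin-tar', 'oss-linux-aarch64-tar', 'oss-linux-tar', 'integ-test-zip'].each { name ->
        println ":distribution:archives:${name}" // e.g. :distribution:archives:oss-linux-tar
    }
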
@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -49,15 +49,6 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) {
String sourceUrl = "https://hg.openjdk.java.net/jdk-updates/jdk${jdkMajorVersion}u/archive/jdk-${jdkVersion}.tar.gz"
additionalLines << "OpenJDK,${jdkVersion},https://openjdk.java.net/,GPL-2.0-with-classpath-exception,${sourceUrl}".toString()

// Explicitly add the dependency on the RHEL UBI Docker base image
String[] rhelUbiFields = [
'Red Hat Universal Base Image minimal',
'8',
'https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8',
'Custom;https://www.redhat.com/licenses/EULA_Red_Hat_Universal_Base_Image_English_20190422.pdf',
'https://oss-dependencies.elastic.co/redhat/ubi/ubi-minimal-8-source.tar.gz'
]
additionalLines << rhelUbiFields.join(',')
}

/*****************************************************************************

@@ -334,7 +325,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
/*****************************************************************************
* Common files in all distributions *
*****************************************************************************/
libFiles = { oss ->
libFiles = {
copySpec {
// delay by using closures, since they have not yet been configured, so no jar task exists yet
from(configurations.libs)

@@ -344,15 +335,10 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
into('tools/keystore-cli') {
from(configurations.libsKeystoreCli)
}
if (oss == false) {
into('tools/security-cli') {
from(configurations.libsSecurityCli)
}
}
}
}

modulesFiles = { oss, platform ->
modulesFiles = { platform ->
copySpec {
eachFile {
if (it.relativePath.segments[-2] == 'bin' || (platform == 'darwin-x86_64' && it.relativePath.segments[-2] == 'MacOS')) {

@@ -363,12 +349,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
it.mode = 0644
}
}
def buildModules
if (oss) {
buildModules = buildOssModulesTaskProvider
} else {
buildModules = buildDefaultModulesTaskProvider
}
def buildModules = buildOssModulesTaskProvider
List excludePlatforms = ['linux-x86_64', 'linux-aarch64', 'windows-x86_64', 'darwin-x86_64']
if (platform != null) {
excludePlatforms.remove(excludePlatforms.indexOf(platform))

@@ -393,25 +374,20 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
from buildTransportModulesTaskProvider
}

configFiles = { distributionType, oss, jdk ->
configFiles = { distributionType, jdk ->
copySpec {
with copySpec {
// main config files, processed with distribution specific substitutions
from '../src/config'
exclude 'log4j2.properties' // this is handled separately below
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk))
}
if (oss) {
from project(':distribution').buildOssLog4jConfig
from project(':distribution').buildOssConfig
} else {
from project(':distribution').buildDefaultLog4jConfig
from project(':distribution').buildDefaultConfig
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, jdk))
}
from project(':distribution').buildOssLog4jConfig
from project(':distribution').buildOssConfig
}
}

binFiles = { distributionType, oss, jdk ->
binFiles = { distributionType, jdk ->
copySpec {
// non-windows files, for all distributions
with copySpec {

@@ -419,7 +395,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
exclude '*.exe'
exclude '*.bat'
eachFile { it.setMode(0755) }
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk))
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, jdk))
}
// windows files, only for zip
if (distributionType == 'zip') {

@@ -427,7 +403,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
from '../src/bin'
include '*.bat'
filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss, jdk))
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, jdk))
}
with copySpec {
from '../src/bin'

@@ -437,11 +413,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
// module provided bin files
with copySpec {
eachFile { it.setMode(0755) }
if (oss) {
from project(':distribution').buildOssBin
} else {
from project(':distribution').buildDefaultBin
}
from project(':distribution').buildOssBin
if (distributionType != 'zip') {
exclude '*.bat'
}

@@ -449,19 +421,15 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
}

noticeFile = { oss, jdk ->
noticeFile = { jdk ->
copySpec {
if (project.name == 'integ-test-zip') {
from buildServerNoticeTaskProvider
} else {
if (oss && jdk) {
if (jdk) {
from buildOssNoticeTaskProvider
} else if (oss) {
from buildOssNoJdkNoticeTaskProvider
} else if (jdk) {
from buildDefaultNoticeTaskProvider
} else {
from buildDefaultNoJdkNoticeTaskProvider
from buildOssNoJdkNoticeTaskProvider
}
}
}

@@ -522,7 +490,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
* </dl>
*/
subprojects {
ext.expansionsForDistribution = { distributionType, oss, jdk ->
ext.expansionsForDistribution = { distributionType, jdk ->
final String defaultHeapSize = "1g"
final String packagingPathData = "path.data: /var/lib/elasticsearch"
final String pathLogs = "/var/log/elasticsearch"

@@ -598,11 +566,6 @@ subprojects {
'def': footer
],

'es.distribution.flavor': [
'def': oss ? 'oss' : 'default'
],

'es.distribution.type': [
'deb': 'deb',
'rpm': 'rpm',

@@ -649,13 +612,11 @@ subprojects {
}
}

['archives:windows-zip', 'archives:oss-windows-zip',
'archives:darwin-tar', 'archives:oss-darwin-tar',
'archives:linux-aarch64-tar', 'archives:oss-linux-aarch64-tar',
'archives:linux-tar', 'archives:oss-linux-tar',
['archives:oss-windows-zip',
'archives:oss-darwin-tar',
'archives:oss-linux-aarch64-tar',
'archives:oss-linux-tar',
'archives:integ-test-zip',
'packages:rpm', 'packages:deb',
'packages:aarch64-rpm', 'packages:aarch64-deb',
'packages:oss-rpm', 'packages:oss-deb',
'packages:aarch64-oss-rpm', 'packages:aarch64-oss-deb'
].forEach { subName ->

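The expansion closure now keys off distribution type and jdk only; the es.distribution.flavor substitution is gone. A minimal sketch of a call site under the new two-argument signature (the 'path.env' lookup mirrors its use in packages/build.gradle further below; the value shown is an assumption):

    // assumed call shape after the signature change
    def expansions = expansionsForDistribution('deb', true) // type, jdk
    String envFile = expansions['path.env']                 // e.g. /etc/default/elasticsearch
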
@@ -1,6 +1,5 @@
import org.elasticsearch.gradle.Architecture
import org.elasticsearch.gradle.DockerBase
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.docker.DockerBuildTask

@@ -14,20 +13,16 @@ apply plugin: 'elasticsearch.rest-resources'
testFixtures.useFixture()

configurations {
aarch64DockerSource
dockerSource
aarch64OssDockerSource
ossDockerSource
}

dependencies {
aarch64DockerSource project(path: ":distribution:archives:linux-aarch64-tar", configuration:"default")
dockerSource project(path: ":distribution:archives:linux-tar", configuration:"default")
aarch64OssDockerSource project(path: ":distribution:archives:oss-linux-aarch64-tar", configuration:"default")
ossDockerSource project(path: ":distribution:archives:oss-linux-tar", configuration:"default")
}

ext.expansions = { Architecture architecture, boolean oss, DockerBase base, boolean local ->
ext.expansions = { Architecture architecture, DockerBase base, boolean local ->
String classifier
if (local) {
if (architecture == Architecture.AARCH64) {

@@ -44,7 +39,7 @@ ext.expansions = { Architecture architecture, boolean oss, DockerBase base, bool
classifier = "linux-\$(arch)"
}

final String elasticsearch = "elasticsearch-${oss ? 'oss-' : ''}${VersionProperties.elasticsearch}-${classifier}.tar.gz"
final String elasticsearch = "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz"

/* Both the following Dockerfile commands put the resulting artifact at
* the same location, regardless of classifier, so that the commands that

@@ -66,83 +61,62 @@ RUN curl --retry 8 -S -L \\
'build_date' : BuildParams.buildDate,
'git_revision' : BuildParams.gitRevision,
'license' : 'Apache-2.0',
'package_manager' : base == DockerBase.UBI ? 'microdnf' : 'yum',
'package_manager' : 'yum',
'source_elasticsearch': sourceElasticsearch,
'docker_base' : base.name().toLowerCase(),
'version' : VersionProperties.elasticsearch
]
}

private static String buildPath(Architecture architecture, boolean oss, DockerBase base) {
private static String buildPath(Architecture architecture, DockerBase base) {
return 'build/' +
(architecture == Architecture.AARCH64 ? 'aarch64-' : '') +
(oss ? 'oss-' : '') +
(base == DockerBase.UBI ? 'ubi-' : '') +
'oss-' +
'docker'
}

private static String taskName(String prefix, Architecture architecture, boolean oss, DockerBase base, String suffix) {
private static String taskName(String prefix, Architecture architecture, DockerBase base, String suffix) {
return prefix +
(architecture == Architecture.AARCH64 ? 'Aarch64' : '') +
(oss ? 'Oss' : '') +
(base == DockerBase.UBI ? 'Ubi' : '') +
'Oss' +
suffix
}

project.ext {
dockerBuildContext = { Architecture architecture, boolean oss, DockerBase base, boolean local ->
dockerBuildContext = { Architecture architecture, DockerBase base, boolean local ->
copySpec {
into('bin') {
from project.projectDir.toPath().resolve("src/docker/bin")
}

into('config') {
/*
* The OSS and default distributions have different configurations, therefore we want to allow overriding the default configuration
* from files in the 'oss' sub-directory. We don't want the 'oss' sub-directory to appear in the final build context, however.
*/
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
from(project.projectDir.toPath().resolve("src/docker/config")) {
exclude 'oss'
}
if (oss) {
// Overlay the config file
from project.projectDir.toPath().resolve("src/docker/config/oss")
}
from project.projectDir.toPath().resolve("src/docker/config")
}

from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
expand(expansions(architecture, oss, base, local))
expand(expansions(architecture, base, local))
}
}
}
}

void addCopyDockerContextTask(Architecture architecture, boolean oss, DockerBase base) {
if (oss && base != DockerBase.CENTOS) {
void addCopyDockerContextTask(Architecture architecture, DockerBase base) {
if (base != DockerBase.CENTOS) {
throw new GradleException("The only allowed docker base image for OSS builds is CENTOS")
}

tasks.register(taskName("copy", architecture, oss, base, "DockerContext"), Sync) {
expansions(architecture, oss, base, true).findAll { it.key != 'build_date' }.each { k, v ->
tasks.register(taskName("copy", architecture, base, "DockerContext"), Sync) {
expansions(architecture, base, true).findAll { it.key != 'build_date' }.each { k, v ->
inputs.property(k, { v.toString() })
}
into buildPath(architecture, oss, base)
into buildPath(architecture, base)

with dockerBuildContext(architecture, oss, base, true)
with dockerBuildContext(architecture, base, true)

if (architecture == Architecture.AARCH64) {
if (oss) {
from configurations.aarch64OssDockerSource
} else {
from configurations.aarch64DockerSource
}
from configurations.aarch64OssDockerSource
} else {
if (oss) {
from configurations.ossDockerSource
} else {
from configurations.dockerSource
}
from configurations.ossDockerSource
}
}
}

@@ -157,31 +131,24 @@ def createAndSetWritable(Object... locations) {

elasticsearch_distributions {
Architecture.values().each { eachArchitecture ->
Flavor.values().each { distroFlavor ->
"docker_$distroFlavor${ eachArchitecture == Architecture.AARCH64 ? '_aarch64' : '' }" {
architecture = eachArchitecture
flavor = distroFlavor
type = 'docker'
version = VersionProperties.getElasticsearch()
failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
}
"docker${ eachArchitecture == Architecture.AARCH64 ? '_aarch64' : '' }" {
architecture = eachArchitecture
type = 'docker'
version = VersionProperties.getElasticsearch()
failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
}
}
}

tasks.named("preProcessFixture").configure {
dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss
dependsOn elasticsearch_distributions.docker
doLast {
// tests expect to have an empty repo
project.delete(
"${buildDir}/repo",
"${buildDir}/oss-repo"
)
createAndSetWritable(
"${buildDir}/repo",
"${buildDir}/oss-repo",
"${buildDir}/logs/default-1",
"${buildDir}/logs/default-2",
"${buildDir}/logs/oss-1",
"${buildDir}/logs/oss-2"
)

@@ -198,34 +165,24 @@ tasks.named("check").configure {
dependsOn "integTest"
}

void addBuildDockerImage(Architecture architecture, boolean oss, DockerBase base) {
if (oss && base != DockerBase.CENTOS) {
void addBuildDockerImage(Architecture architecture, DockerBase base) {
if (base != DockerBase.CENTOS) {
throw new GradleException("The only allowed docker base image for OSS builds is CENTOS")
}

final TaskProvider<DockerBuildTask> buildDockerImageTask =
tasks.register(taskName("build", architecture, oss, base, "DockerImage"), DockerBuildTask) {
tasks.register(taskName("build", architecture, base, "DockerImage"), DockerBuildTask) {
onlyIf { Architecture.current() == architecture }
TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, oss, base, "DockerContext"))
TaskProvider<Sync> copyContextTask = tasks.named(taskName("copy", architecture, base, "DockerContext"))
dependsOn(copyContextTask)
dockerContext.fileProvider(copyContextTask.map { it.destinationDir })
baseImages = [ base.getImage() ]

String version = VersionProperties.elasticsearch
if (oss) {
tags = [
"docker.elastic.co/elasticsearch/elasticsearch-oss:${version}",
"elasticsearch-oss:test"
]
} else {
String suffix = base == DockerBase.UBI ? '-ubi8' : ''
tags = [
"elasticsearch${suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch${suffix}:${version}",
"docker.elastic.co/elasticsearch/elasticsearch-full${suffix}:${version}",
"elasticsearch${suffix}:test",
]
}
}
tasks.named("assemble").configure {
dependsOn(buildDockerImageTask)

@@ -233,15 +190,11 @@ void addBuildDockerImage(Architecture architecture, boolean oss, DockerBase base
}

for (final Architecture architecture : Architecture.values()) {
// We only create Docker images for the OSS distribution on CentOS.
for (final DockerBase base : DockerBase.values()) {
for (final boolean oss : [false, true]) {
if (oss && base != DockerBase.CENTOS) {
// We only create Docker images for the OSS distribution on CentOS.
// Other bases only use the default distribution.
continue
}
addCopyDockerContextTask(architecture, oss, base)
addBuildDockerImage(architecture, oss, base)
if (base == DockerBase.CENTOS) {
addCopyDockerContextTask(architecture, base)
addBuildDockerImage(architecture, base)
}
}
}

@@ -262,16 +215,15 @@ subprojects { Project subProject ->
apply plugin: 'distribution'

final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64
final boolean oss = subProject.name.contains('oss-')
final DockerBase base = subProject.name.contains('ubi-') ? DockerBase.UBI : DockerBase.CENTOS
final DockerBase base = DockerBase.CENTOS

final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : ''
final String suffix = oss ? '-oss' : base == DockerBase.UBI ? '-ubi8' : ''
final String extension = base == DockerBase.UBI ? 'ubi.tar' : 'docker.tar'
final String suffix = '-oss'
final String extension = 'docker.tar'
final String artifactName = "elasticsearch${arch}${suffix}_test"

final String exportTaskName = taskName("export", architecture, oss, base, "DockerImage")
final String buildTaskName = taskName("build", architecture, oss, base, "DockerImage")
final String exportTaskName = taskName("export", architecture, base, "DockerImage")
final String buildTaskName = taskName("build", architecture, base, "DockerImage")
final String tarFile = "${parent.projectDir}/build/${artifactName}_${VersionProperties.elasticsearch}.${extension}"

tasks.register(exportTaskName, LoggedExec) {

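With the flavor dimension dropped, every generated Docker task and build directory carries a fixed oss segment. A hedged sketch of what the reduced helpers above produce (illustrative assertions, derived from the string concatenation shown):

    // illustrative: expected outputs of the reduced helpers
    assert taskName("build", Architecture.X64, DockerBase.CENTOS, "DockerImage") == "buildOssDockerImage"
    assert taskName("copy", Architecture.AARCH64, DockerBase.CENTOS, "DockerContext") == "copyAarch64OssDockerContext"
    assert buildPath(Architecture.X64, DockerBase.CENTOS) == "build/oss-docker"
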
@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

@@ -1,14 +0,0 @@
import org.elasticsearch.gradle.DockerBase

apply plugin: 'base'

tasks.register("buildDockerBuildContext", Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch"
// Non-local builds don't need to specify an architecture.
with dockerBuildContext(null, false, DockerBase.CENTOS, false)
}

tasks.named("assemble").configure {dependsOn "buildDockerBuildContext"}

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

@@ -8,7 +8,7 @@ tasks.register("buildOssDockerBuildContext", Tar) {
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss"
// Non-local builds don't need to specify an architecture.
with dockerBuildContext(null, true, DockerBase.CENTOS, false)
with dockerBuildContext(null, DockerBase.CENTOS, false)
}

tasks.named("assemble").configure { dependsOn "buildOssDockerBuildContext" }

@@ -20,10 +20,6 @@
################################################################################

FROM ${base_image} AS builder
<% if (docker_base == 'ubi') { %>
# Install required packages to extract the Elasticsearch distribution
RUN ${package_manager} install -y tar gzip
<% } %>
# `tini` is a tiny but valid init for containers. This is used to cleanly
# control how ES and any child processes are shut down.
#

@@ -70,7 +66,7 @@ ENV ELASTIC_CONTAINER true
RUN for iter in {1..10}; do \\
${package_manager} update --setopt=tsflags=nodocs -y && \\
${package_manager} install --setopt=tsflags=nodocs -y \\
nc shadow-utils zip unzip <%= docker_base == 'ubi' ? 'findutils procps-ng' : '' %> && \\
nc shadow-utils zip unzip && \\
${package_manager} clean all && exit_code=0 && break || exit_code=\$? && echo "${package_manager} error: retry \$iter in 10s" && \\
sleep 10; \\
done; \\

@@ -124,18 +120,6 @@ LABEL org.label-schema.build-date="${build_date}" \\
org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\
org.opencontainers.image.vendor="Elastic" \\
org.opencontainers.image.version="${version}"
<% if (docker_base == 'ubi') { %>
LABEL name="Elasticsearch" \\
maintainer="infra@elastic.co" \\
vendor="Elastic" \\
version="${version}" \\
release="1" \\
summary="Elasticsearch" \\
description="You know, for search."

RUN mkdir /licenses && \\
cp LICENSE.txt /licenses/LICENSE
<% } %>

ENTRYPOINT ["/tini", "--", "/usr/local/bin/docker-entrypoint.sh"]
# Dummy overridable parameter parsed by entrypoint

|
|||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.common.CharArrays;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.settings.SecureString;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.junit.AfterClass;
|
||||
|
@ -35,11 +31,8 @@ import org.junit.BeforeClass;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.CharBuffer;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
|
||||
public class DockerYmlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
|
@ -58,13 +51,12 @@ public class DockerYmlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
|||
|
||||
@Override
|
||||
protected String getTestRestCluster() {
|
||||
String distribution = getDistribution();
|
||||
return new StringBuilder()
|
||||
.append("localhost:")
|
||||
.append(getProperty("test.fixtures.elasticsearch-" + distribution + "-1.tcp.9200"))
|
||||
.append(getProperty("test.fixtures.elasticsearch-oss-1.tcp.9200"))
|
||||
.append(",")
|
||||
.append("localhost:")
|
||||
.append(getProperty("test.fixtures.elasticsearch-" + distribution + "-2.tcp.9200"))
|
||||
.append(getProperty("test.fixtures.elasticsearch-oss-2.tcp.9200"))
|
||||
.toString();
|
||||
}
|
||||
|
||||
|
@ -73,18 +65,6 @@ public class DockerYmlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
|||
return false;
|
||||
}
|
||||
|
||||
private String getDistribution() {
|
||||
String distribution = System.getProperty("tests.distribution", "default");
|
||||
if (distribution.equals("oss") == false && distribution.equals("default") == false) {
|
||||
throw new IllegalArgumentException("supported values for tests.distribution are oss or default but it was " + distribution);
|
||||
}
|
||||
return distribution;
|
||||
}
|
||||
|
||||
private boolean isOss() {
|
||||
return getDistribution().equals("oss");
|
||||
}
|
||||
|
||||
private String getProperty(String key) {
|
||||
String value = System.getProperty(key);
|
||||
if (value == null) {
|
||||
|
@ -124,40 +104,11 @@ public class DockerYmlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
|||
|
||||
@Override
|
||||
protected Settings restClientSettings() {
|
||||
if (isOss()) {
|
||||
return super.restClientSettings();
|
||||
}
|
||||
String token = basicAuthHeaderValue(USER, new SecureString(PASS.toCharArray()));
|
||||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + ".Authorization", token)
|
||||
.put(ESRestTestCase.TRUSTSTORE_PATH, keyStore)
|
||||
.put(ESRestTestCase.TRUSTSTORE_PASSWORD, KEYSTORE_PASS)
|
||||
.build();
|
||||
return super.restClientSettings();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getProtocol() {
|
||||
if (isOss()) {
|
||||
return "http";
|
||||
}
|
||||
return "https";
|
||||
}
|
||||
|
||||
private static String basicAuthHeaderValue(String username, SecureString passwd) {
|
||||
CharBuffer chars = CharBuffer.allocate(username.length() + passwd.length() + 1);
|
||||
byte[] charBytes = null;
|
||||
try {
|
||||
chars.put(username).put(':').put(passwd.getChars());
|
||||
charBytes = CharArrays.toUtf8Bytes(chars.array());
|
||||
|
||||
//TODO we still have passwords in Strings in headers. Maybe we can look into using a CharSequence?
|
||||
String basicToken = Base64.getEncoder().encodeToString(charBytes);
|
||||
return "Basic " + basicToken;
|
||||
} finally {
|
||||
Arrays.fill(chars.array(), (char) 0);
|
||||
if (charBytes != null) {
|
||||
Arrays.fill(charBytes, (byte) 0);
|
||||
}
|
||||
}
|
||||
return "http";
|
||||
}
|
||||
}
|
||||
|
|
|
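The suite now always targets the two oss fixture nodes over plain http. A minimal sketch of the address string getTestRestCluster assembles (ports are placeholders injected by the test fixtures; the direct System.getProperty calls stand in for the private getProperty helper):

    // illustrative reconstruction of the fixed endpoint assembly
    String cluster = "localhost:" + System.getProperty("test.fixtures.elasticsearch-oss-1.tcp.9200")
        + ",localhost:" + System.getProperty("test.fixtures.elasticsearch-oss-2.tcp.9200");
    // e.g. "localhost:32768,localhost:32769"
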
@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

@@ -1,13 +0,0 @@
import org.elasticsearch.gradle.DockerBase

apply plugin: 'base'

task buildUbiDockerBuildContext(type: Tar) {
archiveExtension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-ubi8"
with dockerBuildContext(null, false, DockerBase.UBI, false)
}

assemble.dependsOn buildUbiDockerBuildContext

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// export is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -55,23 +55,23 @@ plugins {
id "nebula.ospackage-base" version "8.3.0"
}

void addProcessFilesTask(String type, boolean oss, boolean jdk) {
String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}"
void addProcessFilesTask(String type, boolean jdk) {
String packagingFiles = "build/packaging/oss-${jdk ? '' : 'no-jdk-'}${type}"

String taskName = "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
String taskName = "processOss${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
tasks.register(taskName, Copy) {
into packagingFiles

with copySpec {
from 'src/common'
from "src/${type}"
MavenFilteringHack.filter(it, expansionsForDistribution(type, oss, jdk))
MavenFilteringHack.filter(it, expansionsForDistribution(type, jdk))
}

into('etc/elasticsearch') {
with configFiles(type, oss, jdk)
with configFiles(type, jdk)
}
MavenFilteringHack.filter(it, expansionsForDistribution(type, oss, jdk))
MavenFilteringHack.filter(it, expansionsForDistribution(type, jdk))

doLast {
// create empty dirs, we set the permissions when configuring the packages

@@ -86,25 +86,21 @@ void addProcessFilesTask(String type, boolean oss, boolean jdk) {
}
}

addProcessFilesTask('deb', true, true)
addProcessFilesTask('deb', true, false)
addProcessFilesTask('deb', false, true)
addProcessFilesTask('deb', false, false)
addProcessFilesTask('rpm', true, true)
addProcessFilesTask('rpm', true, false)
addProcessFilesTask('rpm', false, true)
addProcessFilesTask('rpm', false, false)
addProcessFilesTask('deb', true)
addProcessFilesTask('deb', false)
addProcessFilesTask('rpm', true)
addProcessFilesTask('rpm', false)

// Common configuration that is package dependent. This can't go in ospackage
// since we have different templated files that need to be consumed, but the structure
// is the same
Closure commonPackageConfig(String type, boolean oss, boolean jdk, String architecture) {
Closure commonPackageConfig(String type, boolean jdk, String architecture) {
return {
onlyIf {
OS.current().equals(OS.WINDOWS) == false
}
dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
packageName "elasticsearch${oss ? '-oss' : ''}"
dependsOn "processOss${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
packageName "elasticsearch-oss"
if (type == 'deb') {
if (architecture == 'x64') {
arch('amd64')

@@ -123,13 +119,13 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit
}
// Follow elasticsearch's file naming convention
String jdkString = jdk ? "" : "no-jdk-"
String prefix = "${architecture == 'aarch64' ? 'aarch64-' : ''}${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}"
String prefix = "${architecture == 'aarch64' ? 'aarch64-' : ''}oss-${jdk ? '' : 'no-jdk-'}${type}"
destinationDirectory = file("${prefix}/build/distributions")

// SystemPackagingTask overrides default archive task convention mappings, but doesn't provide a setter so we have to override the convention mapping itself
conventionMapping.archiveFile = { objects.fileProperty().fileValue(file("${destinationDirectory.get()}/${packageName}-${project.version}-${jdkString}${archString}.${type}")) }

String packagingFiles = "build/packaging/${oss ? 'oss-' : ''}${jdk ? '' : 'no-jdk-'}${type}"
String packagingFiles = "build/packaging/oss-${jdk ? '' : 'no-jdk-'}${type}"

String scripts = "${packagingFiles}/scripts"
preInstall file("${scripts}/preinst")

@@ -144,17 +140,17 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit
// specify it again explicitly for copying common files
into('/usr/share/elasticsearch') {
into('bin') {
with binFiles(type, oss, jdk)
with binFiles(type, jdk)
}
from(rootProject.projectDir) {
include 'README.asciidoc'
fileMode 0644
}
into('lib') {
with libFiles(oss)
with libFiles()
}
into('modules') {
with modulesFiles(oss, 'linux-' + ((architecture == 'x64') ? 'x86_64' : architecture))
with modulesFiles('linux-' + ((architecture == 'x64') ? 'x86_64' : architecture))
}
if (jdk) {
into('jdk') {

@@ -200,12 +196,6 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit
configurationFile '/etc/elasticsearch/elasticsearch.yml'
configurationFile '/etc/elasticsearch/jvm.options'
configurationFile '/etc/elasticsearch/log4j2.properties'
if (oss == false) {
configurationFile '/etc/elasticsearch/role_mapping.yml'
configurationFile '/etc/elasticsearch/roles.yml'
configurationFile '/etc/elasticsearch/users'
configurationFile '/etc/elasticsearch/users_roles'
}
from("${packagingFiles}") {
dirMode 02750
into('/etc')

@@ -224,7 +214,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit
createDirectoryEntry true
fileType CONFIG | NOREPLACE
}
String envFile = expansionsForDistribution(type, oss, jdk)['path.env']
String envFile = expansionsForDistribution(type, jdk)['path.env']
configurationFile envFile
into(new File(envFile).getParent()) {
fileType CONFIG | NOREPLACE

@@ -279,11 +269,8 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk, String archit
copyEmptyDir('/var/lib/elasticsearch', 'elasticsearch', 'elasticsearch', 02750)
copyEmptyDir('/usr/share/elasticsearch/plugins', 'root', 'root', 0755)

// the oss package conflicts with the default distribution and vice versa
conflicts('elasticsearch' + (oss ? '' : '-oss'))

into '/usr/share/elasticsearch'
with noticeFile(oss, jdk)
with noticeFile(jdk)
}
}

@@ -321,9 +308,9 @@ ospackage {
into '/usr/share/elasticsearch'
}

Closure commonDebConfig(boolean oss, boolean jdk, String architecture) {
Closure commonDebConfig(boolean jdk, String architecture) {
return {
configure(commonPackageConfig('deb', oss, jdk, architecture))
configure(commonPackageConfig('deb', jdk, architecture))

// jdeb does not provide a way to set the License control attribute, and ospackage
// silently ignores setting it. Instead, we set the license as "custom field"

@@ -340,40 +327,26 @@ Closure commonDebConfig(boolean oss, boolean jdk, String architecture) {

into('/usr/share/lintian/overrides') {
from('src/deb/lintian/elasticsearch')
if (oss) {
rename('elasticsearch', 'elasticsearch-oss')
}
rename('elasticsearch', 'elasticsearch-oss')
}
}
}

tasks.register('buildAarch64Deb', Deb) {
configure(commonDebConfig(false, true, 'aarch64'))
}

tasks.register('buildDeb', Deb) {
configure(commonDebConfig(false, true, 'x64'))
}

tasks.register('buildAarch64OssDeb', Deb) {
configure(commonDebConfig(true, true, 'aarch64'))
configure(commonDebConfig(true, 'aarch64'))
}

tasks.register('buildOssDeb', Deb) {
configure(commonDebConfig(true, true, 'x64'))
}

tasks.register('buildNoJdkDeb', Deb) {
configure(commonDebConfig(false, false, 'x64'))
configure(commonDebConfig(true, 'x64'))
}

tasks.register('buildOssNoJdkDeb', Deb) {
configure(commonDebConfig(true, false, 'x64'))
configure(commonDebConfig(false, 'x64'))
}

Closure commonRpmConfig(boolean oss, boolean jdk, String architecture) {
Closure commonRpmConfig(boolean jdk, String architecture) {
return {
configure(commonPackageConfig('rpm', oss, jdk, architecture))
configure(commonPackageConfig('rpm', jdk, architecture))

license 'ASL 2.0'

@@ -396,28 +369,16 @@ Closure commonRpmConfig(boolean oss, boolean jdk, String architecture) {
}
}

tasks.register('buildAarch64Rpm', Rpm) {
configure(commonRpmConfig(false, true, 'aarch64'))
}

tasks.register('buildRpm', Rpm) {
configure(commonRpmConfig(false, true, 'x64'))
}

tasks.register('buildAarch64OssRpm', Rpm) {
configure(commonRpmConfig(true, true, 'aarch64'))
configure(commonRpmConfig(true, 'aarch64'))
}

tasks.register('buildOssRpm', Rpm) {
configure(commonRpmConfig(true, true, 'x64'))
}

tasks.register('buildNoJdkRpm', Rpm) {
configure(commonRpmConfig(false, false, 'x64'))
configure(commonRpmConfig(true, 'x64'))
}

tasks.register('buildOssNoJdkRpm', Rpm) {
configure(commonRpmConfig(true, false, 'x64'))
configure(commonRpmConfig(false, 'x64'))
}

Closure dpkgExists = { it -> new File('/bin/dpkg-deb').exists() || new File('/usr/bin/dpkg-deb').exists() || new File('/usr/local/bin/dpkg-deb').exists() }

@@ -494,14 +455,7 @@ subprojects {
Path copyrightPath
String expectedLicense
String licenseFilename
if (project.name.contains('oss-')) {
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright")
}
// TODO - remove this block and only check for the OSS distribution
// https://github.com/opendistro-for-elasticsearch/search/issues/50
else {
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright")
}
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright")
expectedLicense = "ASL-2.0"
licenseFilename = "APACHE-LICENSE-2.0.txt"
final List<String> header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/",

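Since packageName is now always elasticsearch-oss, the remaining package tasks follow a single naming scheme. A hedged sketch of the artifacts implied by the prefix and conventionMapping logic above (the version segment is a placeholder):

    // illustrative: expected outputs of the remaining package tasks
    // buildOssDeb      -> oss-deb/build/distributions/elasticsearch-oss-<version>-amd64.deb
    // buildOssNoJdkDeb -> oss-no-jdk-deb/build/distributions/elasticsearch-oss-<version>-no-jdk-amd64.deb
    // buildOssRpm      -> oss-rpm/build/distributions/elasticsearch-oss-<version>-x86_64.rpm
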
@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

Binary file not shown.

@@ -1,2 +0,0 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

Binary file not shown.

@@ -62,7 +62,6 @@ if [[ $DAEMONIZE = false ]]; then
$ES_JAVA_OPTS \
-Des.path.home="$ES_HOME" \
-Des.path.conf="$ES_PATH_CONF" \
-Des.distribution.flavor="$ES_DISTRIBUTION_FLAVOR" \
-Des.distribution.type="$ES_DISTRIBUTION_TYPE" \
-Des.bundled_jdk="$ES_BUNDLED_JDK" \
-cp "$ES_CLASSPATH" \

@@ -75,7 +74,6 @@ else
$ES_JAVA_OPTS \
-Des.path.home="$ES_HOME" \
-Des.path.conf="$ES_PATH_CONF" \
-Des.distribution.flavor="$ES_DISTRIBUTION_FLAVOR" \
-Des.distribution.type="$ES_DISTRIBUTION_TYPE" \
-Des.bundled_jdk="$ES_BUNDLED_JDK" \
-cp "$ES_CLASSPATH" \

@@ -26,7 +26,6 @@ exec \
$ES_JAVA_OPTS \
-Des.path.home="$ES_HOME" \
-Des.path.conf="$ES_PATH_CONF" \
-Des.distribution.flavor="$ES_DISTRIBUTION_FLAVOR" \
-Des.distribution.type="$ES_DISTRIBUTION_TYPE" \
-cp "$ES_CLASSPATH" \
"$ES_MAIN_CLASS" \

@@ -20,7 +20,6 @@ set ES_JAVA_OPTS=-Xms4m -Xmx64m -XX:+UseSerialGC %ES_JAVA_OPTS%
%ES_JAVA_OPTS% ^
-Des.path.home="%ES_HOME%" ^
-Des.path.conf="%ES_PATH_CONF%" ^
-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
-cp "%ES_CLASSPATH%" ^
"%ES_MAIN_CLASS%" ^

@@ -88,7 +88,6 @@ fi
# now make ES_PATH_CONF absolute
ES_PATH_CONF=`cd "$ES_PATH_CONF"; pwd`

ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor}
ES_DISTRIBUTION_TYPE=${es.distribution.type}
ES_BUNDLED_JDK=${es.bundled_jdk}

@@ -25,7 +25,6 @@ if not defined ES_PATH_CONF (
rem now make ES_PATH_CONF absolute
for %%I in ("%ES_PATH_CONF%..") do set ES_PATH_CONF=%%~dpfI

set ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor}
set ES_DISTRIBUTION_TYPE=${es.distribution.type}
set ES_BUNDLED_JDK=${es.bundled_jdk}

@@ -194,7 +194,7 @@ if "%JVM_SS%" == "" (
set OTHER_JAVA_OPTS=%OTHER_JAVA_OPTS:"=%
set OTHER_JAVA_OPTS=%OTHER_JAVA_OPTS:~1%

set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.path.conf="%ES_PATH_CONF%";-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%";-Des.distribution.type="%ES_DISTRIBUTION_TYPE%";-Des.bundled_jdk="%ES_BUNDLED_JDK%"
set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.path.conf="%ES_PATH_CONF%";-Des.distribution.type="%ES_DISTRIBUTION_TYPE%";-Des.bundled_jdk="%ES_BUNDLED_JDK%"

if "%ES_START_TYPE%" == "" set ES_START_TYPE=manual
if "%ES_STOP_TIMEOUT%" == "" set ES_STOP_TIMEOUT=0

@@ -99,7 +99,6 @@ SET KEYSTORE_PASSWORD=!KEYSTORE_PASSWORD:^\=^^^\!

ECHO.!KEYSTORE_PASSWORD!| %JAVA% %ES_JAVA_OPTS% -Delasticsearch ^
-Des.path.home="%ES_HOME%" -Des.path.conf="%ES_PATH_CONF%" ^
-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
-Des.bundled_jdk="%ES_BUNDLED_JDK%" ^
-cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" !newparams!

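With es.distribution.flavor removed, the launch scripts pass one fewer system property to the JVM. A minimal sketch of the property list they now assemble, assuming the usual exec wrapper (paths and the $JAVA variable are placeholders, not quoted from the scripts):

    # illustrative: the invocation tail after the flavor property is dropped
    exec "$JAVA" $ES_JAVA_OPTS \
      -Des.path.home="$ES_HOME" \
      -Des.path.conf="$ES_PATH_CONF" \
      -Des.distribution.type="$ES_DISTRIBUTION_TYPE" \
      -Des.bundled_jdk="$ES_BUNDLED_JDK" \
      -cp "$ES_CLASSPATH" "$ES_MAIN_CLASS"
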
@@ -29,7 +29,6 @@ apply plugin:'elasticsearch.internal-distribution-download'

elasticsearch_distributions {
local {
flavor = 'default'
type = 'archive'
architecture = Architecture.current()
}

@@ -339,12 +339,6 @@ public class ArchiveTests extends PackagingTestCase {
final Result result = sh.run(bin.shardTool + " -h");
assertThat(result.stdout, containsString("A CLI tool to remove corrupted parts of unrecoverable shards"));
};

// TODO: this should be checked on all distributions
if (distribution().isDefault()) {
Platforms.onLinux(action);
Platforms.onWindows(action);
}
}

public void test92ElasticsearchNodeCliPackaging() throws Exception {

@@ -354,12 +348,6 @@ public class ArchiveTests extends PackagingTestCase {
final Result result = sh.run(bin.nodeTool + " -h");
assertThat(result.stdout, containsString("A CLI tool to do unsafe cluster and index manipulations on current node"));
};

// TODO: this should be checked on all distributions
if (distribution().isDefault()) {
Platforms.onLinux(action);
Platforms.onWindows(action);
}
}

public void test93ElasticsearchNodeCustomDataPathAndNotEsHomeWorkDir() throws Exception {

@@ -52,12 +52,5 @@ public class DebMetadataTests extends PackagingTestCase {
final Shell.Result result = sh.run("dpkg -I " + getDistributionFile(distribution()));

TestCase.assertTrue(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(result.stdout).find());

String oppositePackageName = "elasticsearch";
if (distribution().isDefault()) {
oppositePackageName += "-oss";
}

TestCase.assertTrue(Pattern.compile("(?m)^ Conflicts: " + oppositePackageName + "$").matcher(result.stdout).find());
}
}

@@ -32,10 +32,8 @@ import static org.elasticsearch.packaging.util.Packages.SYSVINIT_SCRIPT;
import static org.elasticsearch.packaging.util.Packages.assertInstalled;
import static org.elasticsearch.packaging.util.Packages.assertRemoved;
import static org.elasticsearch.packaging.util.Packages.installPackage;
import static org.elasticsearch.packaging.util.Packages.packageStatus;
import static org.elasticsearch.packaging.util.Packages.remove;
import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
import static org.hamcrest.core.Is.is;
import static org.junit.Assume.assumeTrue;

public class DebPreservationTests extends PackagingTestCase {

@@ -67,43 +65,14 @@ public class DebPreservationTests extends PackagingTestCase {
installation.config(Paths.get("jvm.options.d", "heap.options"))
);

if (distribution().isDefault()) {
assertPathsExist(
installation.config,
installation.config("role_mapping.yml"),
installation.config("roles.yml"),
installation.config("users"),
installation.config("users_roles")
);
}

// keystore was removed

assertPathsDoNotExist(installation.config("elasticsearch.keystore"), installation.config(".elasticsearch.keystore.initial_md5sum"));

// doc files were removed

assertPathsDoNotExist(
Paths.get("/usr/share/doc/" + distribution().flavor.name),
Paths.get("/usr/share/doc/" + distribution().flavor.name + "/copyright")
);

// sysvinit service file was not removed
assertThat(SYSVINIT_SCRIPT, fileExists());

// defaults file was not removed
assertThat(installation.envFile, fileExists());
}

public void test30Purge() throws Exception {
append(installation.config(Paths.get("jvm.options.d", "heap.options")), "# foo");

sh.run("dpkg --purge " + distribution().flavor.name);

assertRemoved(distribution());

assertPathsDoNotExist(installation.config, installation.envFile, SYSVINIT_SCRIPT);

assertThat(packageStatus(distribution()).exitCode, is(1));
}
}

@@ -20,8 +20,6 @@
package org.elasticsearch.packaging.test;

import com.fasterxml.jackson.databind.JsonNode;
import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.Platforms;
import org.elasticsearch.packaging.util.ServerUtils;

@@ -59,10 +57,8 @@ import static org.elasticsearch.packaging.util.Docker.waitForElasticsearch;
import static org.elasticsearch.packaging.util.FileMatcher.p600;
import static org.elasticsearch.packaging.util.FileMatcher.p644;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
import static org.elasticsearch.packaging.util.FileMatcher.p775;
import static org.elasticsearch.packaging.util.FileUtils.append;
import static org.elasticsearch.packaging.util.FileUtils.rm;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyString;

@@ -253,85 +249,6 @@ public class DockerTests extends PackagingTestCase {
        waitForElasticsearch(installation);
    }

    /**
     * Check that the elastic user's password can be configured via a file and the ELASTIC_PASSWORD_FILE environment variable.
     */
    public void test080ConfigurePasswordThroughEnvironmentVariableFile() throws Exception {
        // Test relies on configuring security
        assumeTrue(distribution.isDefault());

        final String xpackPassword = "hunter2";
        final String passwordFilename = "password.txt";

        append(tempDir.resolve(passwordFilename), xpackPassword + "\n");

        Map<String, String> envVars = new HashMap<>();
        envVars.put("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename);

        // File permissions need to be secured in order for the ES wrapper to accept
        // them for populating env var values
        Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600);
        // But when running in Vagrant, also ensure ES can actually access the file
        chownWithPrivilegeEscalation(tempDir.resolve(passwordFilename), "1000:0");

        final Map<Path, Path> volumes = singletonMap(tempDir, Paths.get("/run/secrets"));

        // Restart the container
        runContainer(distribution(), volumes, envVars);

        // If we configured security correctly, then this call will only work if we specify the correct credentials.
        try {
            waitForElasticsearch("green", null, installation, "elastic", "hunter2");
        } catch (Exception e) {
            throw new AssertionError(
                "Failed to check whether Elasticsearch had started. This could be because "
                    + "authentication isn't working properly. Check the container logs",
                e
            );
        }

        // Also check that an unauthenticated call fails
        final int statusCode = Request.Get("http://localhost:9200/_nodes").execute().returnResponse().getStatusLine().getStatusCode();
        assertThat("Expected server to require authentication", statusCode, equalTo(401));
    }

    /**
     * Check that when verifying the file permissions of _FILE environment variables, symlinks
     * are followed.
     */
    public void test081SymlinksAreFollowedWithEnvironmentVariableFiles() throws Exception {
        // Test relies on configuring security
        assumeTrue(distribution.isDefault());
        // Test relies on symlinks
        assumeFalse(Platforms.WINDOWS);

        final String xpackPassword = "hunter2";
        final String passwordFilename = "password.txt";
        final String symlinkFilename = "password_symlink";

        // ELASTIC_PASSWORD_FILE
        Files.write(tempDir.resolve(passwordFilename), (xpackPassword + "\n").getBytes(StandardCharsets.UTF_8));

        // Link to the password file. We can't use an absolute path for the target, because
        // it won't resolve inside the container.
        Files.createSymbolicLink(tempDir.resolve(symlinkFilename), Paths.get(passwordFilename));

        // Enable security so that we can test that the password has been used
        Map<String, String> envVars = new HashMap<>();
        envVars.put("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename);

        // File permissions need to be secured in order for the ES wrapper to accept
        // them for populating env var values. The wrapper will resolve the symlink
        // and check the target's permissions.
        Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600);

        final Map<Path, Path> volumes = singletonMap(tempDir, Paths.get("/run/secrets"));

        // Restart the container - this will check that Elasticsearch started correctly,
        // and didn't fail to follow the symlink and check the file permissions
        runContainer(distribution(), volumes, envVars);
    }

    /**
     * Check that environment variables cannot be used with _FILE environment variables.
     */

@@ -385,70 +302,6 @@ public class DockerTests extends PackagingTestCase {
        );
    }

    /**
     * Check that when verifying the file permissions of _FILE environment variables, symlinks
     * are followed, and that invalid target permissions are detected.
     */
    public void test084SymlinkToFileWithInvalidPermissionsIsRejected() throws Exception {
        // Test relies on configuring security
        assumeTrue(distribution.isDefault());
        // Test relies on symlinks
        assumeFalse(Platforms.WINDOWS);

        final String xpackPassword = "hunter2";
        final String passwordFilename = "password.txt";
        final String symlinkFilename = "password_symlink";

        // ELASTIC_PASSWORD_FILE
        Files.write(tempDir.resolve(passwordFilename), (xpackPassword + "\n").getBytes(StandardCharsets.UTF_8));

        // Link to the password file. We can't use an absolute path for the target, because
        // it won't resolve inside the container.
        Files.createSymbolicLink(tempDir.resolve(symlinkFilename), Paths.get(passwordFilename));

        // Enable security so that we can test that the password has been used
        Map<String, String> envVars = new HashMap<>();
        envVars.put("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename);

        // Set invalid permissions on the file that the symlink targets
        Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p775);

        final Map<Path, Path> volumes = singletonMap(tempDir, Paths.get("/run/secrets"));

        // Restart the container
        final Result dockerLogs = runContainerExpectingFailure(distribution(), volumes, envVars);

        assertThat(
            dockerLogs.stderr,
            containsString(
                "ERROR: File "
                    + passwordFilename
                    + " (target of symlink /run/secrets/"
                    + symlinkFilename
                    + " from ELASTIC_PASSWORD_FILE) must have file permissions 400 or 600, but actually has: 775"
            )
        );
    }

    /**
     * Check that environment variables are translated to -E options even for commands invoked under
     * `docker exec`, where the Docker image's entrypoint is not executed.
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/67097")
    public void test085EnvironmentVariablesAreRespectedUnderDockerExec() {
        // This test relies on a CLI tool attempting to connect to Elasticsearch, and the
        // tool in question is only in the default distribution.
        assumeTrue(distribution.isDefault());

        runContainer(distribution(), null, singletonMap("http.host", "this.is.not.valid"));

        // This will fail if the env var above is passed as a -E argument
        final Result result = sh.runIgnoreExitCode("elasticsearch-setup-passwords auto");

        assertFalse("elasticsearch-setup-passwords command should have failed", result.isSuccess());
        assertThat(result.stdout, containsString("java.net.UnknownHostException: this.is.not.valid: Name or service not known"));
    }

    /**
     * Check that the elasticsearch-shard tool is shipped in the Docker image and is executable.
     */

@@ -513,11 +366,7 @@ public class DockerTests extends PackagingTestCase {
        staticLabels.put("vcs-url", "https://github.com/elastic/elasticsearch");
        staticLabels.put("vendor", "Elastic");

        if (distribution.isOSS()) {
            staticLabels.put("license", "Apache-2.0");
        } else {
            staticLabels.put("license", "Elastic-License");
        }
        staticLabels.put("license", "Apache-2.0");

        // TODO: we should check the actual version value
        final Set<String> dynamicLabels = new HashSet<>();

@@ -553,11 +402,7 @@ public class DockerTests extends PackagingTestCase {
        staticLabels.put("source", "https://github.com/elastic/elasticsearch");
        staticLabels.put("vendor", "Elastic");

        if (distribution.isOSS()) {
            staticLabels.put("licenses", "Apache-2.0");
        } else {
            staticLabels.put("licenses", "Elastic-License");
        }
        staticLabels.put("licenses", "Apache-2.0");

        // TODO: we should check the actual version value
        final Set<String> dynamicLabels = new HashSet<>();

@@ -644,46 +489,4 @@ public class DockerTests extends PackagingTestCase {
        assertThat("Failed to find [cpu] in node OS cgroup stats", cgroupStats.get("cpu"), not(nullValue()));
        assertThat("Failed to find [cpuacct] in node OS cgroup stats", cgroupStats.get("cpuacct"), not(nullValue()));
    }

    /**
     * Check that the UBI image has the correct license information in the correct place.
     */
    public void test200UbiImagesHaveLicenseDirectory() {
        assumeTrue(distribution.packaging == Distribution.Packaging.DOCKER_UBI);

        final String[] files = sh.run("find /licenses -type f").stdout.split("\n");
        assertThat(files, arrayContaining("/licenses/LICENSE"));

        // UBI image doesn't contain `diff`
        final String ubiLicense = sh.run("cat /licenses/LICENSE").stdout;
        final String distroLicense = sh.run("cat /usr/share/elasticsearch/LICENSE.txt").stdout;
        assertThat(ubiLicense, equalTo(distroLicense));
    }

    /**
     * Check that the UBI image has the expected labels
     */
    public void test210UbiLabels() throws Exception {
        assumeTrue(distribution.packaging == Distribution.Packaging.DOCKER_UBI);

        final Map<String, String> labels = getImageLabels(distribution);

        final Map<String, String> staticLabels = new HashMap<>();
        staticLabels.put("name", "Elasticsearch");
        staticLabels.put("maintainer", "infra@elastic.co");
        staticLabels.put("vendor", "Elastic");
        staticLabels.put("summary", "Elasticsearch");
        staticLabels.put("description", "You know, for search.");

        final Set<String> dynamicLabels = new HashSet<>();
        dynamicLabels.add("release");
        dynamicLabels.add("version");

        staticLabels.forEach((key, value) -> {
            assertThat(labels, hasKey(key));
            assertThat(labels.get(key), equalTo(value));
        });

        dynamicLabels.forEach(key -> assertThat(labels, hasKey(key)));
    }
}

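Aside: test080, test081, and test084 above all exercise the *_FILE convention, where a secret is supplied as a path to a mounted file rather than as a literal value, with strict permission checks on the symlink-resolved target. A minimal standalone sketch of that pattern in plain Java, under the assumption of a POSIX filesystem (the SecretFromFile class and its method are illustrative, not part of the test suite):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Set;

public class SecretFromFile {
    // Reads a secret whose location is given by an env var such as ELASTIC_PASSWORD_FILE.
    static String read(String fileEnvVar) throws IOException {
        String location = System.getenv(fileEnvVar);
        if (location == null) {
            return null;
        }
        // toRealPath() follows symlinks, so the permission check applies to the target.
        Path target = Paths.get(location).toRealPath();
        Set<PosixFilePermission> perms = Files.getPosixFilePermissions(target);
        // Only the owner may read/write: effectively mode 400 or 600.
        boolean ownerOnly = perms.stream().allMatch(p ->
            p == PosixFilePermission.OWNER_READ || p == PosixFilePermission.OWNER_WRITE);
        if (!ownerOnly) {
            throw new IllegalStateException(target + " must have file permissions 400 or 600");
        }
        return Files.readString(target).trim();
    }
}

The file indirection keeps secrets out of `docker inspect` output and process environments, which is why the wrapper insists on owner-only permissions before trusting the file.
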
@@ -491,7 +491,6 @@ public class KeystoreManagementTests extends PackagingTestCase {
                assertThat(keystore, file(File, "root", "elasticsearch", p660));
                break;
            case DOCKER:
            case DOCKER_UBI:
                assertPermissionsAndOwnership(keystore, p660);
                break;
            default:

@@ -220,7 +220,6 @@ public abstract class PackagingTestCase extends Assert {
                Packages.verifyPackageInstallation(installation, distribution, sh);
                break;
            case DOCKER:
            case DOCKER_UBI:
                installation = Docker.runContainer(distribution);
                Docker.verifyContainerInstallation(installation, distribution);
                break;

@@ -296,7 +295,6 @@ public abstract class PackagingTestCase extends Assert {
            case RPM:
                return Packages.runElasticsearchStartCommand(sh);
            case DOCKER:
            case DOCKER_UBI:
                // nothing, "installing" docker image is running it
                return Shell.NO_OP;
            default:

@@ -315,7 +313,6 @@ public abstract class PackagingTestCase extends Assert {
                Packages.stopElasticsearch(sh);
                break;
            case DOCKER:
            case DOCKER_UBI:
                // nothing, "installing" docker image is running it
                break;
            default:

@@ -335,7 +332,6 @@ public abstract class PackagingTestCase extends Assert {
                Packages.assertElasticsearchStarted(sh, installation);
                break;
            case DOCKER:
            case DOCKER_UBI:
                Docker.waitForElasticsearchToStart();
                break;
            default:

@@ -48,12 +48,5 @@ public class RpmMetadataTests extends PackagingTestCase {
        TestCase.assertTrue(Pattern.compile("(?m)^/bin/bash\\s*$").matcher(deps.stdout).find());

        final Shell.Result conflicts = sh.run("rpm -qp --conflicts " + getDistributionFile(distribution()));

        String oppositePackageName = "elasticsearch";
        if (distribution().isDefault()) {
            oppositePackageName += "-oss";
        }

        TestCase.assertTrue(Pattern.compile("(?m)^" + oppositePackageName + "\\s*$").matcher(conflicts.stdout).find());
    }
}

@@ -82,11 +82,6 @@ public class RpmPreservationTests extends PackagingTestCase {
            .map(each -> installation.config(each))
            .forEach(path -> append(path, "# foo"));
        append(installation.config(Paths.get("jvm.options.d", "heap.options")), "# foo");
        if (distribution().isDefault()) {
            Stream.of("role_mapping.yml", "roles.yml", "users", "users_roles")
                .map(each -> installation.config(each))
                .forEach(path -> append(path, "# foo"));
        }

        remove(distribution());
        assertRemoved(distribution());

@@ -112,10 +107,6 @@ public class RpmPreservationTests extends PackagingTestCase {

        Stream.of("elasticsearch.yml", "jvm.options", "log4j2.properties").forEach(this::assertConfFilePreserved);
        assertThat(installation.config(Paths.get("jvm.options.d", "heap.options")), fileExists());

        if (distribution().isDefault()) {
            Stream.of("role_mapping.yml", "roles.yml", "users", "users_roles").forEach(this::assertConfFilePreserved);
        }
    }

    private void assertConfFilePreserved(String configFile) {

@@ -27,7 +27,6 @@ public class Distribution {
    public final Path path;
    public final Packaging packaging;
    public final Platform platform;
    public final Flavor flavor;
    public final boolean hasJdk;
    public final String version;

@@ -39,15 +38,12 @@ public class Distribution {
            this.packaging = Packaging.TAR;
        } else if (filename.endsWith(".docker.tar")) {
            this.packaging = Packaging.DOCKER;
        } else if (filename.endsWith(".ubi.tar")) {
            this.packaging = Packaging.DOCKER_UBI;
        } else {
            int lastDot = filename.lastIndexOf('.');
            this.packaging = Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT));
        }

        this.platform = filename.contains("windows") ? Platform.WINDOWS : Platform.LINUX;
        this.flavor = filename.contains("oss") ? Flavor.OSS : Flavor.DEFAULT;
        this.hasJdk = filename.contains("no-jdk") == false;
        String version = filename.split("-", 3)[1];
        if (filename.contains("-SNAPSHOT")) {

@@ -56,14 +52,6 @@ public class Distribution {
        this.version = version;
    }

    public boolean isDefault() {
        return flavor.equals(Flavor.DEFAULT);
    }

    public boolean isOSS() {
        return flavor.equals(Flavor.OSS);
    }

    public boolean isArchive() {
        return packaging == Packaging.TAR || packaging == Packaging.ZIP;
    }

@@ -73,7 +61,7 @@ public class Distribution {
    }

    public boolean isDocker() {
        return packaging == Packaging.DOCKER || packaging == Packaging.DOCKER_UBI;
        return packaging == Packaging.DOCKER;
    }

    public enum Packaging {

@@ -82,8 +70,7 @@ public class Distribution {
        ZIP(".zip", Platforms.WINDOWS),
        DEB(".deb", Platforms.isDPKG()),
        RPM(".rpm", Platforms.isRPM()),
        DOCKER(".docker.tar", Platforms.isDocker()),
        DOCKER_UBI(".ubi.tar", Platforms.isDocker());
        DOCKER(".docker.tar", Platforms.isDocker());

        /** The extension of this distribution's file */
        public final String extension;

@@ -106,16 +93,4 @@ public class Distribution {
            return name().toLowerCase(Locale.ROOT);
        }
    }

    public enum Flavor {

        OSS("elasticsearch-oss"),
        DEFAULT("elasticsearch");

        public final String name;

        Flavor(String name) {
            this.name = name;
        }
    }
}

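Aside: the Distribution constructor above classifies a distribution purely from its filename, special-casing the multi-dot suffixes before falling back to the extension after the last dot. A compact standalone sketch of the same idea (the PackagingSniffer class is hypothetical; the enum mirrors the simplified Packaging values in this diff):

import java.util.Locale;

public class PackagingSniffer {
    enum Packaging { TAR, ZIP, DEB, RPM, DOCKER }

    // Multi-dot suffixes like ".tar.gz" can't be derived from the last dot,
    // so check them first, then fall back to the plain extension.
    static Packaging fromFilename(String filename) {
        if (filename.endsWith(".tar.gz")) {
            return Packaging.TAR;
        } else if (filename.endsWith(".docker.tar")) {
            return Packaging.DOCKER;
        } else {
            int lastDot = filename.lastIndexOf('.');
            return Packaging.valueOf(filename.substring(lastDot + 1).toUpperCase(Locale.ROOT));
        }
    }

    public static void main(String[] args) {
        System.out.println(fromFilename("elasticsearch-oss-7.10.2-linux-x86_64.tar.gz")); // TAR
        System.out.println(fromFilename("elasticsearch-oss-7.10.2-x86_64.rpm"));          // RPM
    }
}
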
@@ -586,6 +586,6 @@ public class Docker {
    }

    public static String getImageName(Distribution distribution) {
        return distribution.flavor.name + (distribution.packaging == Distribution.Packaging.DOCKER_UBI ? "-ubi8" : "") + ":test";
        return ":test";
    }
}

@@ -124,9 +124,7 @@ public class Packages {
    }

    private static Result runPackageManager(Distribution distribution, Shell sh, PackageManagerCommand command) {
        final String distributionArg = command == PackageManagerCommand.QUERY || command == PackageManagerCommand.REMOVE
            ? distribution.flavor.name
            : distribution.path.toString();
        final String distributionArg = distribution.path.toString();

        if (Platforms.isRPM()) {
            String rpmOptions = RPM_OPTIONS.get(command);

@@ -204,7 +202,7 @@ public class Packages {
        if (distribution.packaging == Distribution.Packaging.RPM) {
            assertThat(es.home.resolve("LICENSE.txt"), file(File, "root", "root", p644));
        } else {
            Path copyrightDir = Paths.get(sh.run("readlink -f /usr/share/doc/" + distribution.flavor.name).stdout.trim());
            Path copyrightDir = Paths.get(sh.run("readlink -f /usr/share/doc/").stdout.trim());
            assertThat(copyrightDir, file(Directory, "root", "root", p755));
            assertThat(copyrightDir.resolve("copyright"), file(File, "root", "root", p644));
        }

@@ -35,7 +35,6 @@ elasticsearch_distributions {
  docker {
    type = 'docker'
    architecture = Architecture.current()
    flavor = System.getProperty('tests.distribution', 'default')
    version = VersionProperties.getElasticsearch()
    failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable
  }

@@ -62,11 +61,7 @@ preProcessFixture {

dockerCompose {
  tcpPortsToIgnoreWhenWaiting = [9600, 9601]
  if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
    useComposeFiles = ['docker-compose.yml']
  } else {
    useComposeFiles = ['docker-compose-oss.yml']
  }
  useComposeFiles = ['docker-compose-oss.yml']
}

def createAndSetWritable(Object... locations) {

@@ -24,11 +24,8 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.CharArrays;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.AfterClass;

@@ -37,19 +34,12 @@ import org.junit.BeforeClass;

import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.CharBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;

public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase {

    private static final String USER = "x_pack_rest_user";
    private static final String PASS = "x-pack-test-password";
    private static final String KEYSTORE_PASS = "testnode";

    @Override
    protected boolean preserveClusterUponCompletion() {
        return true;

@@ -62,7 +52,7 @@ public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase

    @Override
    protected String getTestRestCluster() {
        return "localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-1.tcp.9200");
        return "localhost:" + getProperty("test.fixtures.elasticsearch-oss-1.tcp.9200");
    }

    @Before

@@ -71,8 +61,8 @@ public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase
            return;
        }

        cluster1Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-1.tcp.9200"));
        cluster2Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-2.tcp.9200"));
        cluster1Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-oss-1.tcp.9200"));
        cluster2Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-oss-2.tcp.9200"));

        cluster1Client().cluster().health(new ClusterHealthRequest().waitForNodes("1").waitForYellowStatus(), RequestOptions.DEFAULT);
        cluster2Client().cluster().health(new ClusterHealthRequest().waitForNodes("1").waitForYellowStatus(), RequestOptions.DEFAULT);

@@ -80,13 +70,6 @@ public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase
        initialized = true;
    }

    protected String getDistribution() {
        String distribution = System.getProperty("tests.distribution", "default");
        if (distribution.equals("oss") == false && distribution.equals("default") == false) {
            throw new IllegalArgumentException("supported values for tests.distribution are oss or default but it was " + distribution);
        }
        return distribution;
    }

    @AfterClass
    public static void destroyClients() throws IOException {

@@ -119,10 +102,6 @@ public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase
        return new HighLevelClient(buildClient(restAdminSettings(), new HttpHost[]{httpHost}));
    }

    protected boolean isOss() {
        return getDistribution().equals("oss");
    }

    static Path keyStore;

    @BeforeClass

@@ -144,41 +123,12 @@ public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase

    @Override
    protected Settings restClientSettings() {
        if (isOss()) {
            return super.restClientSettings();
        }
        String token = basicAuthHeaderValue(USER, new SecureString(PASS.toCharArray()));
        return Settings.builder()
            .put(ThreadContext.PREFIX + ".Authorization", token)
            .put(ESRestTestCase.TRUSTSTORE_PATH, keyStore)
            .put(ESRestTestCase.TRUSTSTORE_PASSWORD, KEYSTORE_PASS)
            .build();
        return super.restClientSettings();
    }

    @Override
    protected String getProtocol() {
        if (isOss()) {
            return "http";
        }
        return "https";
    }

    private static String basicAuthHeaderValue(String username, SecureString passwd) {
        CharBuffer chars = CharBuffer.allocate(username.length() + passwd.length() + 1);
        byte[] charBytes = null;
        try {
            chars.put(username).put(':').put(passwd.getChars());
            charBytes = CharArrays.toUtf8Bytes(chars.array());

            //TODO we still have passwords in Strings in headers. Maybe we can look into using a CharSequence?
            String basicToken = Base64.getEncoder().encodeToString(charBytes);
            return "Basic " + basicToken;
        } finally {
            Arrays.fill(chars.array(), (char) 0);
            if (charBytes != null) {
                Arrays.fill(charBytes, (byte) 0);
            }
        }
        return "http";
    }

    private String getProperty(String key) {

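Aside: the removed basicAuthHeaderValue helper above illustrates a deliberate pattern: assemble user:password in a pre-sized buffer, Base64-encode it, then zero every intermediate array so credentials don't linger on the heap. A standalone sketch of that pattern, assuming only JDK types (SecureString and CharArrays are replaced with a plain char[] stand-in; the BasicAuth class is illustrative):

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;

public final class BasicAuth {

    // Builds "Basic <base64(user:pass)>" while scrubbing every intermediate buffer.
    static String headerValue(String username, char[] password) {
        CharBuffer chars = CharBuffer.allocate(username.length() + password.length + 1);
        byte[] utf8 = null;
        try {
            chars.put(username).put(':').put(password);
            ByteBuffer encoded = StandardCharsets.UTF_8.encode(CharBuffer.wrap(chars.array()));
            utf8 = Arrays.copyOfRange(encoded.array(), 0, encoded.limit());
            Arrays.fill(encoded.array(), (byte) 0); // scrub the encoder's backing buffer too
            return "Basic " + Base64.getEncoder().encodeToString(utf8);
        } finally {
            Arrays.fill(chars.array(), (char) 0);
            if (utf8 != null) {
                Arrays.fill(utf8, (byte) 0);
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(headerValue("x_pack_rest_user", "x-pack-test-password".toCharArray()));
    }
}

As the original TODO notes, the final header String still holds the encoded credentials; the scrubbing only limits how many raw copies survive.
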
@@ -68,7 +68,7 @@ public class RemoteClustersIT extends AbstractMultiClusterRemoteTestCase {
    }

    public void testProxyModeConnectionWorks() throws IOException {
        String cluster2RemoteClusterSeed = "elasticsearch-" + getDistribution() + "-2:9300";
        String cluster2RemoteClusterSeed = "elasticsearch-oss-2:9300";
        logger.info("Configuring remote cluster [{}]", cluster2RemoteClusterSeed);
        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings(Settings.builder()
            .put("cluster.remote.cluster2.mode", "proxy")

@@ -85,7 +85,7 @@ public class RemoteClustersIT extends AbstractMultiClusterRemoteTestCase {
    }

    public void testSniffModeConnectionFails() throws IOException {
        String cluster2RemoteClusterSeed = "elasticsearch-" + getDistribution() + "-2:9300";
        String cluster2RemoteClusterSeed = "elasticsearch-oss-2:9300";
        logger.info("Configuring remote cluster [{}]", cluster2RemoteClusterSeed);
        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings(Settings.builder()
            .put("cluster.remote.cluster2alt.mode", "sniff")

@@ -39,11 +39,4 @@ integTest {
    project.delete(repo)
    repo.mkdirs()
  }
  if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'oss'))) {
    systemProperty 'tests.rest.blacklist', [
      'cat.templates/10_basic/No templates',
      'cat.templates/10_basic/Sort templates',
      'cat.templates/10_basic/Multiple template',
    ].join(',')
  }
}

@@ -26,9 +26,8 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.standalone-test'
apply from : "$rootDir/gradle/bwc-test.gradle"

boolean isDefaultDistro = System.getProperty('tests.distribution', 'oss') == 'default'
for (Version bwcVersion : BuildParams.bwcVersions.indexCompatible) {
  if (bwcVersion.before('6.3.0') && isDefaultDistro) {
  if (bwcVersion.before('6.3.0')) {
    // explicitly running restart on the current node does not work in step 2
    // below when plugins are installed, which is the case for x-pack as a plugin
    // prior to 6.3.0

@@ -57,7 +57,6 @@ elasticsearch_distributions {
  docker {
    type = 'docker'
    architecture = Architecture.current()
    flavor = System.getProperty('tests.distribution', 'default')
    version = VersionProperties.getElasticsearch()
    failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable
  }

@@ -68,11 +67,7 @@ preProcessFixture {
}

dockerCompose {
  if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
    useComposeFiles = ['docker-compose.yml']
  } else {
    useComposeFiles = ['docker-compose-oss.yml']
  }
  useComposeFiles = ['docker-compose-oss.yml']
}

tasks.register("integTest", Test) {

@@ -41,42 +41,6 @@ public class Build {
     */
    public static final Build CURRENT;

    public enum Flavor {

        DEFAULT("default"),
        OSS("oss"),
        UNKNOWN("unknown");

        final String displayName;

        Flavor(final String displayName) {
            this.displayName = displayName;
        }

        public String displayName() {
            return displayName;
        }

        public static Flavor fromDisplayName(final String displayName, final boolean strict) {
            switch (displayName) {
                case "default":
                    return Flavor.DEFAULT;
                case "oss":
                    return Flavor.OSS;
                case "unknown":
                    return Flavor.UNKNOWN;
                default:
                    if (strict) {
                        final String message = "unexpected distribution flavor [" + displayName + "]; your distribution is broken";
                        throw new IllegalStateException(message);
                    } else {
                        return Flavor.UNKNOWN;
                    }
            }
        }

    }

    public enum Type {

        DEB("deb"),

@@ -122,7 +86,6 @@ public class Build {
    }

    static {
        final Flavor flavor;
        final Type type;
        final String hash;
        final String date;

@@ -130,7 +93,6 @@ public class Build {
        final String version;

        // these are parsed at startup, and we require that we are able to recognize the values passed in by the startup scripts
        flavor = Flavor.fromDisplayName(System.getProperty("es.distribution.flavor", "unknown"), true);
        type = Type.fromDisplayName(System.getProperty("es.distribution.type", "unknown"), true);

        final String esPrefix = "elasticsearch-" + Version.CURRENT;

@@ -180,7 +142,7 @@ public class Build {
                "Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
        }

        CURRENT = new Build(flavor, type, hash, date, isSnapshot, version);
        CURRENT = new Build(type, hash, date, isSnapshot, version);
    }

    private final boolean isSnapshot;

@@ -195,17 +157,15 @@ public class Build {
        return codeSource == null ? null : codeSource.getLocation();
    }

    private final Flavor flavor;
    private final Type type;
    private final String hash;
    private final String date;
    private final String version;

    public Build(
        final Flavor flavor, final Type type, final String hash, final String date, boolean isSnapshot,
        final Type type, final String hash, final String date, boolean isSnapshot,
        String version
    ) {
        this.flavor = flavor;
        this.type = type;
        this.hash = hash;
        this.date = date;

@@ -222,13 +182,12 @@ public class Build {
    }

    public static Build readBuild(StreamInput in) throws IOException {
        final Flavor flavor;
        final String flavor;
        final Type type;
        if (in.getVersion().onOrAfter(Version.V_6_3_0)) {
            // be lenient when reading on the wire, the enumeration values from other versions might be different than what we know
            flavor = Flavor.fromDisplayName(in.readString(), false);
        } else {
            flavor = Flavor.OSS;
        // The following block is kept for existing BWC tests to pass.
        // TODO - clean up this code when we remove all v6 bwc tests.
        if (in.getVersion().onOrAfter(Version.V_6_3_0) && in.getVersion().onOrBefore(Version.V_7_0_0)) {
            flavor = in.readString();
        }
        if (in.getVersion().onOrAfter(Version.V_6_3_0)) {
            // be lenient when reading on the wire, the enumeration values from other versions might be different than what we know

@@ -246,12 +205,14 @@ public class Build {
        } else {
            version = in.getVersion().toString();
        }
        return new Build(flavor, type, hash, date, snapshot, version);
        return new Build(type, hash, date, snapshot, version);
    }

    public static void writeBuild(Build build, StreamOutput out) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
            out.writeString(build.flavor().displayName());
        // The following block is kept for existing BWC tests to pass.
        // TODO - clean up this code when we remove all v6 bwc tests.
        if (out.getVersion().onOrAfter(Version.V_6_3_0) && out.getVersion().onOrBefore(Version.V_7_0_0)) {
            out.writeString("oss");
        }
        if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
            final Type buildType;

@@ -283,10 +244,6 @@ public class Build {
        return version;
    }

    public Flavor flavor() {
        return flavor;
    }

    public Type type() {
        return type;
    }

@@ -306,7 +263,7 @@ public class Build {

    @Override
    public String toString() {
        return "[" + flavor.displayName() + "][" + type.displayName + "][" + hash + "][" + date + "][" + version +"]";
        return "[" + type.displayName + "][" + hash + "][" + date + "][" + version +"]";
    }

    @Override

@@ -320,10 +277,6 @@ public class Build {

        Build build = (Build) o;

        if (!flavor.equals(build.flavor)) {
            return false;
        }

        if (!type.equals(build.type)) {
            return false;
        }

@@ -342,7 +295,7 @@ public class Build {

    @Override
    public int hashCode() {
        return Objects.hash(flavor, type, isSnapshot, hash, date, version);
        return Objects.hash(type, isSnapshot, hash, date, version);
    }

}

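Aside: readBuild and writeBuild above keep emitting the legacy flavor string "oss" only for peers whose wire version falls in [6.3.0, 7.0.0], the standard version-gated serialization idiom for staying compatible with older nodes. A simplified standalone sketch of that idiom (the Version record and DataOutputStream here are stand-ins, not the Elasticsearch StreamOutput API):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WireCompat {
    // Stand-in for the receiver's wire version, comparable like Version.V_6_3_0.
    record Version(int id) {
        boolean onOrAfter(Version v) { return id >= v.id; }
        boolean onOrBefore(Version v) { return id <= v.id; }
    }

    static final Version V_6_3_0 = new Version(6_03_00);
    static final Version V_7_0_0 = new Version(7_00_00);

    // Peers in [6.3.0, 7.0.0] still expect a flavor string on the wire,
    // so emit the legacy constant "oss" only for them.
    static void writeBuild(String type, Version peer, DataOutputStream out) throws IOException {
        if (peer.onOrAfter(V_6_3_0) && peer.onOrBefore(V_7_0_0)) {
            out.writeUTF("oss");
        }
        out.writeUTF(type);
    }

    public static void main(String[] args) throws IOException {
        DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());
        writeBuild("tar", new Version(6_08_00), out); // legacy peer: "oss" + type
        writeBuild("tar", new Version(7_10_00), out); // current peer: type only
    }
}

The read side must gate on exactly the same version ranges, which is why the diff mirrors the block in both readBuild and writeBuild.
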
@@ -484,7 +484,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
            Locale.ROOT,
            "Version: %s, Build: %s/%s/%s/%s, JVM: %s",
            Build.CURRENT.getQualifiedVersion(),
            Build.CURRENT.flavor().displayName(),
            Build.CURRENT.type().displayName(),
            Build.CURRENT.hash(),
            Build.CURRENT.date(),

@@ -75,7 +75,6 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
            builder.field("ip", nodeInfo.getNode().getHostAddress());

            builder.field("version", nodeInfo.getVersion());
            builder.field("build_flavor", nodeInfo.getBuild().flavor().displayName());
            builder.field("build_type", nodeInfo.getBuild().type().displayName());
            builder.field("build_hash", nodeInfo.getBuild().hash());
            if (nodeInfo.getTotalIndexingBuffer() != null) {

@@ -28,7 +28,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;

@@ -668,14 +667,13 @@ public class ClusterStatsNodes implements ToXContentFragment {

    static class PackagingTypes implements ToXContentFragment {

        private final Map<Tuple<String, String>, AtomicInteger> packagingTypes;
        private final Map<String, AtomicInteger> packagingTypes;

        PackagingTypes(final List<NodeInfo> nodeInfos) {
            final Map<Tuple<String, String>, AtomicInteger> packagingTypes = new HashMap<>();
            final Map<String, AtomicInteger> packagingTypes = new HashMap<>();
            for (final NodeInfo nodeInfo : nodeInfos) {
                final String flavor = nodeInfo.getBuild().flavor().displayName();
                final String type = nodeInfo.getBuild().type().displayName();
                packagingTypes.computeIfAbsent(Tuple.tuple(flavor, type), k -> new AtomicInteger()).incrementAndGet();
                packagingTypes.computeIfAbsent(type, k -> new AtomicInteger()).incrementAndGet();
            }
            this.packagingTypes = Collections.unmodifiableMap(packagingTypes);
        }

@@ -684,11 +682,10 @@ public class ClusterStatsNodes implements ToXContentFragment {
        public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
            builder.startArray("packaging_types");
            {
                for (final Map.Entry<Tuple<String, String>, AtomicInteger> entry : packagingTypes.entrySet()) {
                for (final Map.Entry<String, AtomicInteger> entry : packagingTypes.entrySet()) {
                    builder.startObject();
                    {
                        builder.field("flavor", entry.getKey().v1());
                        builder.field("type", entry.getKey().v2());
                        builder.field("type", entry.getKey());
                        builder.field("count", entry.getValue().get());
                    }
                    builder.endObject();

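Aside: PackagingTypes above collapses its per-(flavor, type) counters to per-type counters, keeping the usual computeIfAbsent counting idiom. A standalone sketch of that idiom (the TypeCounts class is illustrative):

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

public class TypeCounts {
    // Count occurrences per key: create the counter on first sight, then increment.
    static Map<String, AtomicInteger> countByType(List<String> types) {
        final Map<String, AtomicInteger> counts = new HashMap<>();
        for (String type : types) {
            counts.computeIfAbsent(type, k -> new AtomicInteger()).incrementAndGet();
        }
        return counts;
    }

    public static void main(String[] args) {
        System.out.println(countByType(List.of("tar", "docker", "tar"))); // {tar=2, docker=1}
    }
}

Dropping the Tuple key is what lets the diff also drop the Tuple import at the top of the file.
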
@@ -105,7 +105,6 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
        builder.field("cluster_uuid", clusterUuid);
        builder.startObject("version")
            .field("number", build.getQualifiedVersion())
            .field("build_flavor", build.flavor().displayName())
            .field("build_type", build.type().displayName())
            .field("build_hash", build.hash())
            .field("build_date", build.date())

@@ -128,7 +127,6 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
        PARSER.declareString((response, value) -> response.clusterUuid = value, new ParseField("cluster_uuid"));
        PARSER.declareString((response, value) -> {}, new ParseField("tagline"));
        PARSER.declareObject((response, value) -> {
            final String buildFlavor = (String) value.get("build_flavor");
            final String buildType = (String) value.get("build_type");
            response.build =
                new Build(

@@ -136,7 +134,6 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
                     * Be lenient when reading on the wire, the enumeration values from other versions might be different than what
                     * we know.
                     */
                    buildFlavor == null ? Build.Flavor.UNKNOWN : Build.Flavor.fromDisplayName(buildFlavor, false),
                    buildType == null ? Build.Type.UNKNOWN : Build.Type.fromDisplayName(buildType, false),
                    (String) value.get("build_hash"),
                    (String) value.get("build_date"),

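Aside: the parser above maps a missing or unrecognized build_type to UNKNOWN instead of failing, the same lenient/strict split used by Build.Type.fromDisplayName in this diff. A standalone sketch of that split (the LenientEnum class is illustrative; valueOf is used here in place of the explicit switch):

import java.util.Locale;

public class LenientEnum {
    enum Type { DEB, DOCKER, RPM, TAR, ZIP, UNKNOWN }

    // strict = true: an unrecognized name is a hard error (values parsed at local startup).
    // strict = false: fall back to UNKNOWN (values read off the wire from other versions).
    static Type fromDisplayName(String displayName, boolean strict) {
        try {
            return Type.valueOf(displayName.toUpperCase(Locale.ROOT));
        } catch (IllegalArgumentException e) {
            if (strict) {
                throw new IllegalStateException("unexpected distribution type [" + displayName + "]");
            }
            return Type.UNKNOWN;
        }
    }

    public static void main(String[] args) {
        System.out.println(fromDisplayName("docker", true)); // DOCKER
        System.out.println(fromDisplayName("zip2", false));  // UNKNOWN
    }
}
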
@@ -134,9 +134,8 @@ class Elasticsearch extends EnvironmentAwareCommand {
        if (options.has(versionOption)) {
            final String versionOutput = String.format(
                Locale.ROOT,
                "Version: %s, Build: %s/%s/%s/%s, JVM: %s",
                "Version: %s, Build: %s/%s/%s, JVM: %s",
                Build.CURRENT.getQualifiedVersion(),
                Build.CURRENT.flavor().displayName(),
                Build.CURRENT.type().displayName(),
                Build.CURRENT.hash(),
                Build.CURRENT.date(),

@@ -307,10 +307,9 @@ public class Node implements Closeable {

        final JvmInfo jvmInfo = JvmInfo.jvmInfo();
        logger.info(
            "version[{}], pid[{}], build[{}/{}/{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]",
            "version[{}], pid[{}], build[{}/{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]",
            Build.CURRENT.getQualifiedVersion(),
            jvmInfo.pid(),
            Build.CURRENT.flavor().displayName(),
            Build.CURRENT.type().displayName(),
            Build.CURRENT.hash(),
            Build.CURRENT.date(),

@@ -143,7 +143,6 @@ public class RestNodesAction extends AbstractCatAction {
        table.addCell("http_address", "default:false;alias:http;desc:bound http address");

        table.addCell("version", "default:false;alias:v;desc:es version");
        table.addCell("flavor", "default:false;alias:f;desc:es distribution flavor");
        table.addCell("type", "default:false;alias:t;desc:es distribution type");
        table.addCell("build", "default:false;alias:b;desc:es build hash");
        table.addCell("jdk", "default:false;alias:j;desc:jdk version");

@@ -299,7 +298,6 @@ public class RestNodesAction extends AbstractCatAction {
        }

        table.addCell(node.getVersion().toString());
        table.addCell(info == null ? null : info.getBuild().flavor().displayName());
        table.addCell(info == null ? null : info.getBuild().type().displayName());
        table.addCell(info == null ? null : info.getBuild().hash());
        table.addCell(jvmInfo == null ? null : jvmInfo.version());

@@ -54,28 +54,28 @@ public class BuildTests extends ESTestCase {

    public void testIsProduction() {
        Build build = new Build(
            Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.isSnapshot(), Math.abs(randomInt()) + "." + Math.abs(randomInt()) + "." + Math.abs(randomInt())
        );
        assertTrue(build.getQualifiedVersion(), build.isProductionRelease());

        assertFalse(new Build(
            Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.isSnapshot(), "7.0.0-alpha1"
        ).isProductionRelease());

        assertFalse(new Build(
            Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.isSnapshot(), "7.0.0-alpha1-SNAPSHOT"
        ).isProductionRelease());

        assertFalse(new Build(
            Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.isSnapshot(), "7.0.0-SNAPSHOT"
        ).isProductionRelease());

        assertFalse(new Build(
            Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.type(), Build.CURRENT.hash(), Build.CURRENT.date(),
            Build.CURRENT.isSnapshot(), "Unknown"
        ).isProductionRelease());
    }

@@ -84,45 +84,37 @@ public class BuildTests extends ESTestCase {
        Build build = Build.CURRENT;

        Build another = new Build(
            build.flavor(), build.type(), build.hash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
            build.type(), build.hash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
        );
        assertEquals(build, another);
        assertEquals(build.hashCode(), another.hashCode());

        final Set<Build.Flavor> otherFlavors =
            Arrays.stream(Build.Flavor.values()).filter(f -> !f.equals(build.flavor())).collect(Collectors.toSet());
        final Build.Flavor otherFlavor = randomFrom(otherFlavors);
        Build differentFlavor = new Build(
            otherFlavor, build.type(), build.hash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
        );
        assertNotEquals(build, differentFlavor);

        final Set<Build.Type> otherTypes =
            Arrays.stream(Build.Type.values()).filter(f -> !f.equals(build.type())).collect(Collectors.toSet());
        final Build.Type otherType = randomFrom(otherTypes);
        Build differentType = new Build(
            build.flavor(), otherType, build.hash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
            otherType, build.hash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
        );
        assertNotEquals(build, differentType);

        Build differentHash = new Build(
            build.flavor(), build.type(), randomAlphaOfLengthBetween(3, 10), build.date(), build.isSnapshot(),
            build.type(), randomAlphaOfLengthBetween(3, 10), build.date(), build.isSnapshot(),
            build.getQualifiedVersion()
        );
        assertNotEquals(build, differentHash);

        Build differentDate = new Build(
            build.flavor(), build.type(), build.hash(), "1970-01-01", build.isSnapshot(), build.getQualifiedVersion()
            build.type(), build.hash(), "1970-01-01", build.isSnapshot(), build.getQualifiedVersion()
        );
        assertNotEquals(build, differentDate);

        Build differentSnapshot = new Build(
            build.flavor(), build.type(), build.hash(), build.date(), !build.isSnapshot(), build.getQualifiedVersion()
            build.type(), build.hash(), build.date(), !build.isSnapshot(), build.getQualifiedVersion()
        );
        assertNotEquals(build, differentSnapshot);

        Build differentVersion = new Build(
            build.flavor(), build.type(), build.hash(), build.date(), build.isSnapshot(), "1.2.3"
            build.type(), build.hash(), build.date(), build.isSnapshot(), "1.2.3"
        );
        assertNotEquals(build, differentVersion);
    }

@@ -163,31 +155,27 @@ public class BuildTests extends ESTestCase {

    public void testSerialization() {
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(new WriteableBuild(new Build(
            randomFrom(Build.Flavor.values()), randomFrom(Build.Type.values()),
            randomFrom(Build.Type.values()),
            randomAlphaOfLength(6), randomAlphaOfLength(6), randomBoolean(), randomAlphaOfLength(6))),
            // Note: the cast of the Copy- and MutateFunction is needed for some IDE (specifically Eclipse 4.10.0) to infer the right type
            (WriteableBuild b) -> copyWriteable(b, writableRegistry(), WriteableBuild::new, Version.CURRENT),
            (WriteableBuild b) -> {
                switch (randomIntBetween(1, 6)) {
                switch (randomIntBetween(1, 5)) {
                    case 1:
                        return new WriteableBuild(new Build(
                            randomValueOtherThan(b.build.flavor(), () -> randomFrom(Build.Flavor.values())), b.build.type(),
                            b.build.hash(), b.build.date(), b.build.isSnapshot(), b.build.getQualifiedVersion()));
                    case 2:
                        return new WriteableBuild(new Build(b.build.flavor(),
                            randomValueOtherThan(b.build.type(), () -> randomFrom(Build.Type.values())),
                            b.build.hash(), b.build.date(), b.build.isSnapshot(), b.build.getQualifiedVersion()));
                    case 3:
                        return new WriteableBuild(new Build(b.build.flavor(), b.build.type(),
                    case 2:
                        return new WriteableBuild(new Build(b.build.type(),
                            randomStringExcept(b.build.hash()), b.build.date(), b.build.isSnapshot(), b.build.getQualifiedVersion()));
                    case 4:
                        return new WriteableBuild(new Build(b.build.flavor(), b.build.type(),
                    case 3:
                        return new WriteableBuild(new Build(b.build.type(),
                            b.build.hash(), randomStringExcept(b.build.date()), b.build.isSnapshot(), b.build.getQualifiedVersion()));
                    case 5:
                        return new WriteableBuild(new Build(b.build.flavor(), b.build.type(),
                    case 4:
                        return new WriteableBuild(new Build(b.build.type(),
                            b.build.hash(), b.build.date(), b.build.isSnapshot() == false, b.build.getQualifiedVersion()));
                    case 6:
                        return new WriteableBuild(new Build(b.build.flavor(), b.build.type(),
                    case 5:
                        return new WriteableBuild(new Build(b.build.type(),
                            b.build.hash(), b.build.date(), b.build.isSnapshot(), randomStringExcept(b.build.getQualifiedVersion())));
                }
                throw new AssertionError();

@@ -195,7 +183,7 @@ public class BuildTests extends ESTestCase {
    }

    public void testSerializationBWC() throws IOException {
        final WriteableBuild dockerBuild = new WriteableBuild(new Build(randomFrom(Build.Flavor.values()), Build.Type.DOCKER,
        final WriteableBuild dockerBuild = new WriteableBuild(new Build(Build.Type.DOCKER,
            randomAlphaOfLength(6), randomAlphaOfLength(6), randomBoolean(), randomAlphaOfLength(6)));

        final List<Version> versions = Version.getDeclaredVersions(Version.class);

@@ -211,11 +199,6 @@ public class BuildTests extends ESTestCase {
        final WriteableBuild post67pre70 = copyWriteable(dockerBuild, writableRegistry(), WriteableBuild::new, post67Pre70Version);
        final WriteableBuild post70 = copyWriteable(dockerBuild, writableRegistry(), WriteableBuild::new, post70Version);

        assertThat(pre63.build.flavor(), equalTo(Build.Flavor.OSS));
        assertThat(post63pre67.build.flavor(), equalTo(dockerBuild.build.flavor()));
        assertThat(post67pre70.build.flavor(), equalTo(dockerBuild.build.flavor()));
        assertThat(post70.build.flavor(), equalTo(dockerBuild.build.flavor()));

        assertThat(pre63.build.type(), equalTo(Build.Type.UNKNOWN));
        assertThat(post63pre67.build.type(), equalTo(Build.Type.TAR));
        assertThat(post67pre70.build.type(), equalTo(dockerBuild.build.type()));

@@ -227,13 +210,6 @@ public class BuildTests extends ESTestCase {
        assertThat(post70.build.getQualifiedVersion(), equalTo(dockerBuild.build.getQualifiedVersion()));
    }

    public void testFlavorParsing() {
        for (final Build.Flavor flavor : Build.Flavor.values()) {
            // strict or not should not impact parsing at all here
            assertThat(Build.Flavor.fromDisplayName(flavor.displayName(), randomBoolean()), sameInstance(flavor));
        }
    }

    public void testTypeParsing() {
        for (final Build.Type type : Build.Type.values()) {
            // strict or not should not impact parsing at all here

@@ -241,18 +217,6 @@ public class BuildTests extends ESTestCase {
        }
    }

    public void testLenientFlavorParsing() {
        final String displayName = randomAlphaOfLength(8);
        assertThat(Build.Flavor.fromDisplayName(displayName, false), equalTo(Build.Flavor.UNKNOWN));
    }

    public void testStrictFlavorParsing() {
        final String displayName = randomAlphaOfLength(8);
        @SuppressWarnings("ResultOfMethodCallIgnored") final IllegalStateException e =
            expectThrows(IllegalStateException.class, () -> Build.Flavor.fromDisplayName(displayName, true));
        assertThat(e, hasToString(containsString("unexpected distribution flavor [" + displayName + "]; your distribution is broken")));
    }

    public void testLenientTypeParsing() {
        final String displayName = randomAlphaOfLength(8);
        assertThat(Build.Type.fromDisplayName(displayName, false), equalTo(Build.Type.UNKNOWN));

@@ -44,7 +44,7 @@ public class MainResponseTests extends AbstractSerializingTestCase<MainResponse>
        final String date = new Date(randomNonNegativeLong()).toString();
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_1, Version.CURRENT);
        Build build = new Build(
            Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(),
            Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(),
            version.toString()
        );
        return new MainResponse(nodeName, version, clusterName, clusterUuid, build);

@@ -64,7 +64,7 @@ public class MainResponseTests extends AbstractSerializingTestCase<MainResponse>
        String clusterUUID = randomAlphaOfLengthBetween(10, 20);
        final Build current = Build.CURRENT;
        Build build = new Build(
            current.flavor(), current.type(), current.hash(), current.date(), current.isSnapshot(),
            current.type(), current.hash(), current.date(), current.isSnapshot(),
            current.getQualifiedVersion()
        );
        Version version = Version.CURRENT;

@@ -77,7 +77,6 @@ public class MainResponseTests extends AbstractSerializingTestCase<MainResponse>
            + "\"cluster_uuid\":\"" + clusterUUID + "\","
            + "\"version\":{"
            + "\"number\":\"" + build.getQualifiedVersion() + "\","
            + "\"build_flavor\":\"" + current.flavor().displayName() + "\","
            + "\"build_type\":\"" + current.type().displayName() + "\","
            + "\"build_hash\":\"" + current.hash() + "\","
            + "\"build_date\":\"" + current.date() + "\","

@@ -106,7 +105,7 @@ public class MainResponseTests extends AbstractSerializingTestCase<MainResponse>
            case 2:
                // toggle the snapshot flag of the original Build parameter
                build = new Build(
                    Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, build.hash(), build.date(), !build.isSnapshot(),
                    Build.Type.UNKNOWN, build.hash(), build.date(), !build.isSnapshot(),
                    build.getQualifiedVersion()
                );
                break;

@@ -65,8 +65,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase {
        assertThat(output, containsString("Version: " + Build.CURRENT.getQualifiedVersion()));
        final String expectedBuildOutput = String.format(
            Locale.ROOT,
            "Build: %s/%s/%s/%s",
            Build.CURRENT.flavor().displayName(),
            "Build: %s/%s/%s",
            Build.CURRENT.type().displayName(),
            Build.CURRENT.hash(),
            Build.CURRENT.date());

@@ -19,43 +19,23 @@ List projects = [
  'benchmarks',
  'distribution:archives:integ-test-zip',
  'distribution:archives:oss-windows-zip',
  'distribution:archives:windows-zip',
  'distribution:archives:oss-no-jdk-windows-zip',
  'distribution:archives:no-jdk-windows-zip',
  'distribution:archives:oss-darwin-tar',
  'distribution:archives:darwin-tar',
  'distribution:archives:oss-no-jdk-darwin-tar',
  'distribution:archives:no-jdk-darwin-tar',
  'distribution:archives:oss-linux-aarch64-tar',
  'distribution:archives:oss-linux-tar',
  'distribution:archives:linux-aarch64-tar',
  'distribution:archives:linux-tar',
  'distribution:archives:oss-no-jdk-linux-tar',
  'distribution:archives:no-jdk-linux-tar',
  'distribution:docker',
  'distribution:docker:docker-aarch64-build-context',
  'distribution:docker:docker-aarch64-export',
  'distribution:docker:docker-build-context',
  'distribution:docker:docker-export',
  'distribution:docker:oss-docker-aarch64-build-context',
  'distribution:docker:oss-docker-aarch64-export',
  'distribution:docker:oss-docker-build-context',
  'distribution:docker:oss-docker-export',
  'distribution:docker:ubi-docker-aarch64-export',
  'distribution:docker:ubi-docker-build-context',
  'distribution:docker:ubi-docker-export',
  'distribution:packages:aarch64-oss-deb',
  'distribution:packages:oss-deb',
  'distribution:packages:aarch64-deb',
  'distribution:packages:deb',
  'distribution:packages:oss-no-jdk-deb',
  'distribution:packages:no-jdk-deb',
  'distribution:packages:aarch64-oss-rpm',
  'distribution:packages:oss-rpm',
  'distribution:packages:aarch64-rpm',
  'distribution:packages:rpm',
  'distribution:packages:oss-no-jdk-rpm',
  'distribution:packages:no-jdk-rpm',
  'distribution:bwc:bugfix',
  'distribution:bwc:maintenance',
  'distribution:bwc:minor',

@@ -172,7 +172,6 @@ public class ReproduceInfoPrinter extends RunListener {
        }
        appendOpt("tests.locale", Locale.getDefault().toLanguageTag());
        appendOpt("tests.timezone", TimeZone.getDefault().getID());
        appendOpt("tests.distribution", System.getProperty("tests.distribution"));
        appendOpt("runtime.java", Integer.toString(JavaVersion.current().getVersion().get(0)));
        appendOpt(ESTestCase.FIPS_SYSPROP, System.getProperty(ESTestCase.FIPS_SYSPROP));
        return this;