Backport testclusters fix bwc (#47363)

* Add support for bwc for testclusters and convert full cluster restart (#45374)

* Testclusters fix bwc (#46740)

Additions to make testclusters work with later versions of ES

* Do common node config on bwc tests

Before this PR we only ever ran `ElasticsearchCluster.start` once, so
the common node config was never done.
This becomes apparent when upgrading from `6.x` to `7.x`, as the new config
is missing, preventing the cluster from starting.

* Do common node config on bwc tests

Before this PR we only ever ran `ElasticsearchCluster.start` once, so
the common node config was never done.
This becomes apparent when upgrading from `6.x` to `7.x`, as the new config
is missing, preventing the cluster from starting.

* Fix logic to pick up snapshot from 6.x

* Make sure ports are cleared

* Fix test

* Don't clear all the config as we rely on it

* Fix removal of keys
This commit is contained in:
Alpar Torok 2019-10-02 14:37:00 +03:00 committed by GitHub
parent 42453aec96
commit a032f9b2d5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 279 additions and 148 deletions

View File

@ -202,17 +202,22 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
}
String extension = distribution.getType().toString();
String classifier = "x86_64";
if (distribution.getVersion().before("7.0.0")) {
classifier = null; // no platform specific distros before 7.0
} else if (distribution.getType() == Type.ARCHIVE) {
String classifier = ":x86_64";
if (distribution.getType() == Type.ARCHIVE) {
extension = distribution.getPlatform() == Platform.WINDOWS ? "zip" : "tar.gz";
classifier = distribution.getPlatform() + "-" + classifier;
if (distribution.getVersion().onOrAfter("7.0.0")) {
classifier = ":" + distribution.getPlatform() + "-x86_64";
} else {
classifier = "";
}
} else if (distribution.getType() == Type.DEB) {
classifier = "amd64";
classifier = ":amd64";
}
return FAKE_IVY_GROUP + ":elasticsearch" + (distribution.getFlavor() == Flavor.OSS ? "-oss:" : ":")
+ distribution.getVersion() + (classifier == null ? "" : ":" + classifier) + "@" + extension;
String flavor = "";
if (distribution.getFlavor() == Flavor.OSS && distribution.getVersion().onOrAfter("6.3.0")) {
flavor = "-oss";
}
return FAKE_IVY_GROUP + ":elasticsearch" + flavor + ":" + distribution.getVersion() + classifier + "@" + extension;
}
private static Dependency projectDependency(Project project, String projectPath, String projectConfig) {
@ -246,8 +251,12 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
projectName += "no-jdk-";
}
if (distribution.getType() == Type.ARCHIVE) {
Platform platform = distribution.getPlatform();
projectName += platform.toString() + (platform == Platform.WINDOWS ? "-zip" : "-tar");
if (distribution.getVersion().onOrAfter("7.0.0")) {
Platform platform = distribution.getPlatform();
projectName += platform.toString() + (platform == Platform.WINDOWS ? "-zip" : "-tar");
} else {
projectName = "zip";
}
} else {
projectName += distribution.getType();
}

View File

@ -161,7 +161,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
return bundledJdk.getOrElse(true);
}
public void setBundledJdk(boolean bundledJdk) {
public void setBundledJdk(Boolean bundledJdk) {
this.bundledJdk.set(bundledJdk);
}
@ -197,15 +197,15 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
void finalizeValues() {
if (getType() == Type.INTEG_TEST_ZIP) {
if (platform.isPresent()) {
if (platform.getOrNull() != null) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
}
if (flavor.isPresent()) {
if (flavor.getOrNull() != null) {
throw new IllegalArgumentException(
"flavor not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
"flavor [" + flavor.get() + "] not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
}
if (bundledJdk.isPresent()) {
if (bundledJdk.getOrNull() != null) {
throw new IllegalArgumentException(
"bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
}

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.ReaperService;
@ -59,24 +58,23 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
private final String clusterName;
private final NamedDomainObjectContainer<ElasticsearchNode> nodes;
private final File workingDirBase;
private final Function<Integer, ElasticsearchDistribution> distributionFactory;
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final Project project;
private final ReaperService reaper;
private int nodeIndex = 0;
public ElasticsearchCluster(String path, String clusterName, Project project, ReaperService reaper,
Function<Integer, ElasticsearchDistribution> distributionFactory, File workingDirBase) {
public ElasticsearchCluster(String path, String clusterName, Project project,
ReaperService reaper, File workingDirBase) {
this.path = path;
this.clusterName = clusterName;
this.project = project;
this.reaper = reaper;
this.distributionFactory = distributionFactory;
this.workingDirBase = workingDirBase;
this.nodes = project.container(ElasticsearchNode.class);
this.nodes.add(
new ElasticsearchNode(
path, clusterName + "-0",
project, reaper, workingDirBase, distributionFactory.apply(0)
project, reaper, workingDirBase
)
);
// configure the cluster name eagerly so nodes know about it
@ -100,7 +98,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
for (int i = nodes.size() ; i < numberOfNodes; i++) {
this.nodes.add(new ElasticsearchNode(
path, clusterName + "-" + i, project, reaper, workingDirBase, distributionFactory.apply(i)
path, clusterName + "-" + i, project, reaper, workingDirBase
));
}
}
@ -126,6 +124,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
nodes.all(each -> each.setVersion(version));
}
@Override
public void setVersions(List<String> version) {
nodes.all(each -> each.setVersions(version));
}
@Override
public void setTestDistribution(TestDistribution distribution) {
nodes.all(each -> each.setTestDistribution(distribution));
@ -245,22 +248,50 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
@Override
public void start() {
commonNodeConfig();
nodes.forEach(ElasticsearchNode::start);
}
private void commonNodeConfig() {
final String nodeNames;
if (nodes.stream().map(ElasticsearchNode::getName).anyMatch( name -> name == null)) {
if (nodes.stream().map(ElasticsearchNode::getName).anyMatch(name -> name == null)) {
nodeNames = null;
} else {
nodeNames = nodes.stream().map(ElasticsearchNode::getName).collect(Collectors.joining(","));
};
nodeNames = nodes.stream().map(ElasticsearchNode::getName).map(this::safeName).collect(Collectors.joining(","));
}
ElasticsearchNode firstNode = null;
for (ElasticsearchNode node : nodes) {
// Can only configure master nodes if we have node names defined
if (nodeNames != null) {
// Can only configure master nodes if we have node names defined
if (node.getVersion().getMajor() >= 7) {
if (node.getVersion().onOrAfter("7.0.0")) {
node.defaultConfig.keySet().stream()
.filter(name -> name.startsWith("discovery.zen."))
.collect(Collectors.toList())
.forEach(node.defaultConfig::remove);
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
node.defaultConfig.put("discovery.seed_providers", "file");
node.defaultConfig.put("discovery.seed_hosts", "[]");
} else {
node.defaultConfig.put("discovery.zen.master_election.wait_for_joins_timeout", "5s");
if (nodes.size() > 1) {
node.defaultConfig.put("discovery.zen.minimum_master_nodes", Integer.toString(nodes.size() / 2 + 1));
}
if (node.getVersion().onOrAfter("6.5.0")) {
node.defaultConfig.put("discovery.zen.hosts_provider", "file");
node.defaultConfig.put("discovery.zen.ping.unicast.hosts", "[]");
} else {
if (firstNode == null) {
node.defaultConfig.put("discovery.zen.ping.unicast.hosts", "[]");
} else {
firstNode.waitForAllConditions();
node.defaultConfig.put("discovery.zen.ping.unicast.hosts", "[\"" + firstNode.getTransportPortURI() + "\"]");
}
}
}
}
node.start();
if (firstNode == null) {
firstNode = node;
}
}
}
@ -269,6 +300,25 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
nodes.forEach(ElasticsearchNode::restart);
}
public void goToNextVersion() {
stop(false);
nodes.all(ElasticsearchNode::goToNextVersion);
start();
writeUnicastHostsFiles();
}
public void nextNodeToNextVersion() {
if (nodeIndex + 1 > nodes.size()) {
throw new TestClustersException("Ran out of nodes to take to the next version");
}
ElasticsearchNode node = nodes.getByName(clusterName + "-" + nodeIndex);
node.stop(false);
node.goToNextVersion();
commonNodeConfig();
nodeIndex += 1;
node.start();
}
@Override
public void extraConfigFile(String destination, File from) {
nodes.all(node -> node.extraConfigFile(destination, from));
@ -320,9 +370,6 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
}
public void waitForAllConditions() {
LOGGER.info("Waiting for nodes");
nodes.forEach(ElasticsearchNode::waitForAllConditions);
writeUnicastHostsFiles();
LOGGER.info("Starting to wait for cluster to form");
@ -363,7 +410,6 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
nodes.size()
);
if (httpSslEnabled) {
getFirstNode().configureHttpWait(wait);
}
List<Map<String, String>> credentials = getFirstNode().getCredentials();

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.LazyPropertyList;
@ -31,8 +32,8 @@ import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.http.WaitForHttpResource;
import org.gradle.api.Action;
import org.gradle.api.Named;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Classpath;
@ -71,6 +72,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
@ -137,23 +139,23 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private final Path esStdoutFile;
private final Path esStderrFile;
private final Path tmpDir;
private final Path distroDir;
private String version;
private int currentDistro = 0;
private TestDistribution testDistribution;
private ElasticsearchDistribution distribution;
private List<ElasticsearchDistribution> distributions = new ArrayList<>();
private File javaHome;
private volatile Process esProcess;
private Function<String, String> nameCustomization = Function.identity();
private boolean isWorkingDirConfigured = false;
ElasticsearchNode(String path, String name, Project project, ReaperService reaper, File workingDirBase,
ElasticsearchDistribution distribution) {
ElasticsearchNode(String path, String name, Project project, ReaperService reaper, File workingDirBase) {
this.path = path;
this.name = name;
this.project = project;
this.reaper = reaper;
this.workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
this.distribution = distribution;
workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
distroDir = workingDir.resolve("distro");
confPathRepo = workingDir.resolve("repo");
configFile = workingDir.resolve("config/elasticsearch.yml");
confPathData = workingDir.resolve("data");
@ -175,15 +177,36 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Internal
public Version getVersion() {
return distribution.getVersion();
return distributions.get(currentDistro).getVersion();
}
@Override
public void setVersion(String version) {
requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
checkFrozen();
this.version = version;
this.distribution.setVersion(version);
distributions.clear();
doSetVersion(version);
}
@Override
public void setVersions(List<String> versions) {
requireNonNull(versions, "null version list passed when configuring test cluster `" + this + "`");
distributions.clear();
for (String version : versions) {
doSetVersion(version);
}
}
private void doSetVersion(String version) {
String distroName = "testclusters" + path.replace(":", "-") + "-" + this.name + "-" + version + "-";
NamedDomainObjectContainer<ElasticsearchDistribution> container = DistributionDownloadPlugin.getContainer(project);
if (container.findByName(distroName) == null) {
container.create(distroName);
}
ElasticsearchDistribution distro = container.getByName(distroName);
distro.setVersion(version);
setDistributionType(distro, testDistribution);
distributions.add(distro);
}
@Internal
@ -193,8 +216,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// package private just so test clusters plugin can access to wire up task dependencies
@Internal
ElasticsearchDistribution getDistribution() {
return distribution;
List<ElasticsearchDistribution> getDistributions() {
return distributions;
}
@Override
@ -202,14 +225,24 @@ public class ElasticsearchNode implements TestClusterConfiguration {
requireNonNull(testDistribution, "null distribution passed when configuring test cluster `" + this + "`");
checkFrozen();
this.testDistribution = testDistribution;
for (ElasticsearchDistribution distribution : distributions) {
setDistributionType(distribution, testDistribution);
}
}
private void setDistributionType(ElasticsearchDistribution distribution, TestDistribution testDistribution) {
if (testDistribution == TestDistribution.INTEG_TEST) {
this.distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
// we change the underlying distribution when changing the test distribution of the cluster.
distribution.setFlavor(null);
distribution.setPlatform(null);
distribution.setBundledJdk(null);
} else {
this.distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
if (testDistribution == TestDistribution.DEFAULT) {
this.distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
} else {
this.distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
}
}
}
@ -319,8 +352,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
public void freeze() {
requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
requireNonNull(getVersion(), "null version passed when configuring test cluster `" + this + "`");
requireNonNull(distributions, "null distribution passed when configuring test cluster `" + this + "`");
requireNonNull(javaHome, "null javaHome passed when configuring test cluster `" + this + "`");
LOGGER.info("Locking configuration of `{}`", this);
configurationFrozen.set(true);
@ -363,10 +395,13 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try {
if (isWorkingDirConfigured == false) {
logToProcessStdout("Configuring working directory: " + workingDir);
// Only configure working dir once so we don't lose data on restarts
// make sure we always start fresh
if (Files.exists(workingDir)) {
project.delete(workingDir);
}
isWorkingDirConfigured = true;
createWorkingDir(getExtractedDistributionDir());
}
createWorkingDir(getExtractedDistributionDir());
} catch (IOException e) {
throw new UncheckedIOException("Failed to create working directory for " + this, e);
}
@ -380,6 +415,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
);
}
if (getVersion().before("6.3.0") && testDistribution == TestDistribution.DEFAULT) {
LOGGER.info("emulating the {} flavor for {} by installing x-pack", testDistribution, getVersion());
runElaticsearchBinScript(
"elasticsearch-plugin",
"install", "--batch", "x-pack"
);
}
if (keystoreSettings.isEmpty() == false || keystoreFiles.isEmpty() == false) {
logToProcessStdout("Adding " + keystoreSettings.size() + " keystore settings and " + keystoreFiles.size() + " keystore files");
runElaticsearchBinScript("elasticsearch-keystore", "create");
@ -402,13 +445,17 @@ public class ElasticsearchNode implements TestClusterConfiguration {
copyExtraConfigFiles();
if (isSettingMissingOrTrue("xpack.security.enabled")) {
logToProcessStdout("Setting up " + credentials.size() + " users");
if (isSettingTrue("xpack.security.enabled")) {
if (credentials.isEmpty()) {
user(Collections.emptyMap());
}
}
if (credentials.isEmpty() == false) {
logToProcessStdout("Setting up " + credentials.size() + " users");
credentials.forEach(paramMap -> runElaticsearchBinScript(
"elasticsearch-users",
getVersion().onOrAfter("6.3.0") ? "elasticsearch-users" : "x-pack/users",
paramMap.entrySet().stream()
.flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
.toArray(String[]::new)
@ -438,17 +485,19 @@ public class ElasticsearchNode implements TestClusterConfiguration {
public void restart() {
LOGGER.info("Restarting {}", this);
stop(false);
try {
Files.delete(httpPortsFile);
Files.delete(transportPortFile);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
start();
}
private boolean isSettingMissingOrTrue(String name) {
void goToNextVersion() {
if (currentDistro + 1 >= distributions.size()) {
throw new TestClustersException("Ran out of versions to go to for " + this);
}
logToProcessStdout("Switch version from " + getVersion() + " to " + distributions.get(currentDistro + 1).getVersion());
currentDistro += 1;
setting("node.attr.upgraded", "true");
}
private boolean isSettingTrue(String name) {
return Boolean.valueOf(settings.getOrDefault(name, "false").toString());
}
@ -476,8 +525,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (testDistribution == TestDistribution.INTEG_TEST) {
logToProcessStdout("Installing " + modules.size() + "modules");
for (File module : modules) {
Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "")
.replace("-" + version, ""));
Path destination = distroDir.resolve("modules").resolve(module.getName().replace(".zip", "")
.replace("-" + getVersion(), "")
.replace("-SNAPSHOT", ""));
// only install modules that are not already bundled with the integ-test distribution
if (Files.exists(destination) == false) {
@ -494,7 +544,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
}
} else {
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distribution + " distribution already " +
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distributions + " distribution already " +
"has them");
}
}
@ -535,8 +585,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private void runElaticsearchBinScriptWithInput(String input, String tool, String... args) {
if (
Files.exists(workingDir.resolve("bin").resolve(tool)) == false &&
Files.exists(workingDir.resolve("bin").resolve(tool + ".bat")) == false
Files.exists(distroDir.resolve("bin").resolve(tool)) == false &&
Files.exists(distroDir.resolve("bin").resolve(tool + ".bat")) == false
) {
throw new TestClustersException("Can't run bin script: `" + tool + "` does not exist. " +
"Is this the distribution you expect it to be ?");
@ -544,7 +594,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
LoggedExec.exec(project, spec -> {
spec.setEnvironment(getESEnvironment());
spec.workingDir(workingDir);
spec.workingDir(distroDir);
spec.executable(
OS.conditionalString()
.onUnix(() -> "./bin/" + tool)
@ -629,8 +679,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
final ProcessBuilder processBuilder = new ProcessBuilder();
List<String> command = OS.<List<String>>conditional()
.onUnix(() -> Arrays.asList("./bin/elasticsearch"))
.onWindows(() -> Arrays.asList("cmd", "/c", "bin\\elasticsearch.bat"))
.onUnix(() -> Arrays.asList(distroDir.getFileName().resolve("./bin/elasticsearch").toString()))
.onWindows(() -> Arrays.asList("cmd", "/c", distroDir.getFileName().resolve("bin\\elasticsearch.bat").toString()))
.supply();
processBuilder.command(command);
processBuilder.directory(workingDir.toFile());
@ -662,11 +712,13 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
public List<String> getAllHttpSocketURI() {
waitForAllConditions();
return getHttpPortInternal();
}
@Override
public List<String> getAllTransportPortURI() {
waitForAllConditions();
return getTransportPortInternal();
}
@ -694,6 +746,17 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logFileContents("Standard error of node", esStderrFile);
}
esProcess = null;
// Clean up the ports file in case this is started again.
try {
if (Files.exists(httpPortsFile)) {
Files.delete(httpPortsFile);
}
if (Files.exists(transportPortFile)) {
Files.delete(transportPortFile);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
@ -830,7 +893,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private void createWorkingDir(Path distroExtractDir) throws IOException {
syncWithLinks(distroExtractDir, workingDir);
syncWithLinks(distroExtractDir, distroDir);
Files.createDirectories(configFile.getParent());
Files.createDirectories(confPathRepo);
Files.createDirectories(confPathData);
@ -853,7 +916,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try (Stream<Path> stream = Files.walk(sourceRoot)) {
stream.forEach(source -> {
Path destination = destinationRoot.resolve(sourceRoot.relativize(source));
Path relativeDestination = sourceRoot.relativize(source);
if (relativeDestination.getNameCount() <= 1) {
return;
}
// Throw away the first name as the archives have everything in a single top level folder we are not interested in
relativeDestination = relativeDestination.subpath(1, relativeDestination.getNameCount());
Path destination = destinationRoot.resolve(relativeDestination);
if (Files.isDirectory(source)) {
try {
Files.createDirectories(destination);
@ -933,9 +1003,6 @@ public class ElasticsearchNode implements TestClusterConfiguration {
.forEach(defaultConfig::remove);
try {
// We create hard links for the distribution, so we need to remove the config file before writing it
// to prevent the changes to reflect across all copies.
Files.delete(configFile);
Files.write(
configFile,
Stream.concat(
@ -944,8 +1011,21 @@ public class ElasticsearchNode implements TestClusterConfiguration {
)
.map(entry -> entry.getKey() + ": " + entry.getValue())
.collect(Collectors.joining("\n"))
.getBytes(StandardCharsets.UTF_8)
.getBytes(StandardCharsets.UTF_8),
StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE
);
final List<Path> configFiles;
try (Stream<Path> stream = Files.list(distroDir.resolve("config"))) {
configFiles = stream.collect(Collectors.toList());
}
logToProcessStdout("Copying additional config files from distro " + configFiles);
for (Path file : configFiles) {
Path dest = configFile.getParent().resolve(file.getFileName());
if (Files.exists(dest) == false) {
Files.copy(file, dest);
}
}
} catch (IOException e) {
throw new UncheckedIOException("Could not write config file: " + configFile, e);
}
@ -985,7 +1065,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private Path getExtractedDistributionDir() {
return Paths.get(distribution.getExtracted().toString()).resolve("elasticsearch-" + version);
return Paths.get(distributions.get(currentDistro).getExtracted().toString());
}
private List<File> getInstalledFileSet(Action<? super PatternFilterable> filter) {
@ -1020,19 +1100,26 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
@Classpath
private List<File> getDistributionClasspath() {
ArrayList<File> files = new ArrayList<>(project.fileTree(getExtractedDistributionDir())
.matching(filter -> filter.include("**/*.jar"))
.getFiles());
files.sort(Comparator.comparing(File::getName));
return files;
private Set<File> getDistributionClasspath() {
return getDistributionFiles(filter -> filter.include("**/*.jar"));
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private FileCollection getDistributionFiles() {
return project.fileTree(getExtractedDistributionDir()).minus(project.files(getDistributionClasspath()));
private Set<File> getDistributionFiles() {
return getDistributionFiles(filter -> filter.exclude("**/*.jar"));
}
private Set<File> getDistributionFiles(Action<PatternFilterable> patternFilter) {
Set<File> files = new TreeSet<>();
for (ElasticsearchDistribution distribution : distributions) {
files.addAll(
project.fileTree(Paths.get(distribution.getExtracted().toString()))
.matching(patternFilter)
.getFiles()
);
}
return files;
}
@Nested

View File

@ -38,6 +38,8 @@ public interface TestClusterConfiguration {
void setVersion(String version);
void setVersions(List<String> version);
void setTestDistribution(TestDistribution distribution);
void plugin(URI plugin);
@ -165,7 +167,7 @@ public interface TestClusterConfiguration {
default String safeName(String name) {
return name
.replaceAll("^[^a-zA-Z0-9]+", "")
.replaceAll("[^a-zA-Z0-9]+", "-");
.replaceAll("[^a-zA-Z0-9\\.]+", "-");
}
boolean isProcessAlive();

View File

@ -18,9 +18,9 @@ interface TestClustersAware extends Task {
);
}
for (ElasticsearchNode node : cluster.getNodes()) {
this.dependsOn(node.getDistribution().getExtracted());
}
cluster.getNodes().stream().flatMap(node -> node.getDistributions().stream()).forEach( distro ->
dependsOn(distro.getExtracted())
);
getClusters().add(cluster);
}
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.ReaperPlugin;
import org.elasticsearch.gradle.ReaperService;
import org.gradle.api.NamedDomainObjectContainer;
@ -76,8 +75,6 @@ public class TestClustersPlugin implements Plugin<Project> {
}
private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersContainerExtension(Project project) {
NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
// Create an extensions that allows describing clusters
NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
ElasticsearchCluster.class,
@ -86,7 +83,6 @@ public class TestClustersPlugin implements Plugin<Project> {
name,
project,
reaper,
i -> distros.create(name + "-" + i),
new File(project.getBuildDir(), "testclusters")
)
);

View File

@ -50,12 +50,17 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
public void testBwc() throws Exception {
assertExtractedDistro("1.1.0", "archive", "linux", null, null,
"tests.local_distro.config", "linux-tar",
"tests.local_distro.config", "zip",
"tests.local_distro.project", ":distribution:bwc:minor",
"tests.current_version", "2.0.0");
}
public void testReleased() throws Exception {
doTestReleased("7.0.0", "/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip");
doTestReleased("6.5.0", "/downloads/elasticsearch/elasticsearch-6.5.0.zip");
}
private void doTestReleased(String version, String urlPath) throws IOException {
WireMockServer wireMock = new WireMockServer(0);
try {
final byte[] filebytes;
@ -63,12 +68,11 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
Files.newInputStream(Paths.get("src/testKit/distribution-download/distribution/files/fake_elasticsearch.zip"))) {
filebytes = stream.readAllBytes();
}
String urlPath = "/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip";
wireMock.stubFor(head(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200)));
wireMock.stubFor(get(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200).withBody(filebytes)));
wireMock.start();
assertExtractedDistro("7.0.0", "archive", "windows", null, null,
assertExtractedDistro(version, "archive", "windows", null, null,
"tests.download_service", wireMock.baseUrl());
} catch (Exception e) {
// for debugging

View File

@ -90,7 +90,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
public void testFlavorForIntegTest() {
assertDistroError(createProject(null),
"testdistro", "5.0.0", Type.INTEG_TEST_ZIP, null, Flavor.OSS, null,
"flavor not allowed for elasticsearch distribution [testdistro]");
"flavor [oss] not allowed for elasticsearch distribution [testdistro] of type [integ_test_zip]");
}
public void testBundledJdkDefault() {

View File

@ -19,9 +19,10 @@
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.testclusters.RestTestRunnerTask
apply plugin: 'elasticsearch.standalone-test'
apply plugin: 'elasticsearch.testclusters'
// This is a top level task which we will add dependencies to below.
// It is a single task that can be used to backcompat tests against all versions.
@ -30,65 +31,53 @@ task bwcTest {
group = 'verification'
}
for (Version version : bwcVersions.indexCompatible) {
String baseName = "v${version}"
for (Version bwcVersion : bwcVersions.indexCompatible) {
String baseName = "v${bwcVersion}"
Task oldClusterTest = tasks.create(name: "${baseName}#oldClusterTest", type: RestIntegTestTask) {
mustRunAfter(precommit)
}
tasks.getByName("${baseName}#oldClusterTestRunner").configure {
systemProperty 'tests.is_old_cluster', 'true'
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo/" + baseName)
}
Object extension = extensions.findByName("${baseName}#oldClusterTestCluster")
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
bwcVersion = version
numBwcNodes = 2
numNodes = 2
clusterName = 'full-cluster-restart'
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
if (version.onOrAfter('5.3.0')) {
testClusters {
"${baseName}" {
versions = [ bwcVersion.toString(), project.version ]
numberOfNodes = 2
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
setting 'http.content_type.required', 'true'
setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
javaHome = project.file(project.ext.runtimeJavaHome)
}
}
tasks.register("${baseName}#oldClusterTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}"
mustRunAfter(precommit)
doFirst {
project.delete("${buildDir}/cluster/shared/repo/${baseName}")
}
Task upgradedClusterTest = tasks.create(name: "${baseName}#upgradedClusterTest", type: RestIntegTestTask) {
dependsOn(oldClusterTest, "${baseName}#oldClusterTestCluster#node0.stop")
systemProperty 'tests.is_old_cluster', 'true'
}
configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) {
dependsOn oldClusterTest,
"${baseName}#oldClusterTestCluster#node0.stop",
"${baseName}#oldClusterTestCluster#node1.stop"
clusterName = 'full-cluster-restart'
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
numNodes = 2
dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir }
cleanShared = false // We want to keep snapshots made by the old cluster!
}
tasks.getByName("${baseName}#upgradedClusterTestRunner").configure {
tasks.register("${baseName}#upgradedClusterTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}"
dependsOn "${baseName}#oldClusterTest"
doFirst {
testClusters."${baseName}".goToNextVersion()
}
systemProperty 'tests.is_old_cluster', 'false'
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo")
}
Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
dependsOn = [upgradedClusterTest]
tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT")
it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
}
if (project.bwc_tests_enabled) {
bwcTest.dependsOn(versionBwcTest)
bwcTest.dependsOn(
tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest")
}
)
}
}
@ -116,4 +105,4 @@ task testJar(type: Jar) {
artifacts {
testArtifacts testJar
}
}

View File

@ -264,8 +264,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
Map<String, Object> clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state")));
// Check some global properties:
String clusterName = (String) clusterState.get("cluster_name");
assertEquals("full-cluster-restart", clusterName);
String numberOfShards = (String) XContentMapValues.extractValue(
"metadata.templates.template_1.settings.index.number_of_shards", clusterState);
assertEquals("1", numberOfShards);