Testclusters: implement support to install plugins (#39116)
* Add methods to run the bin scripts
* Add support for specifying and installing plugins
* Add OS-specific distribution support
* Add a test to verify the plugin is installed
* Remove use of Gradle internal OperatingSystem
parent 813351fe26
commit 0f85182abe
@@ -18,6 +1,7 @@
 */
package org.elasticsearch;

import org.elasticsearch.gradle.LoggedExec;
import org.gradle.api.Action;
import org.gradle.api.Project;
import org.gradle.api.file.CopySpec;
@@ -25,6 +26,7 @@ import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
import org.gradle.api.tasks.WorkResult;
import org.gradle.process.ExecResult;
import org.gradle.process.ExecSpec;
import org.gradle.process.JavaExecSpec;

import java.io.File;
@@ -70,4 +72,8 @@ public class GradleServicesAdapter {
    public FileCollection fileTree(File dir) {
        return project.fileTree(dir);
    }

    public void loggedExec(Action<ExecSpec> action) {
        LoggedExec.exec(project, action);
    }
}

@@ -20,23 +20,36 @@ package org.elasticsearch.gradle;

public enum Distribution {

    INTEG_TEST("integ-test", "zip"),
    ZIP("elasticsearch", "zip"),
    ZIP_OSS("elasticsearch-oss", "zip");
    INTEG_TEST("integ-test"),
    DEFAULT("elasticsearch"),
    OSS("elasticsearch-oss");

    private final String fileName;
    private final String fileExtension;

    Distribution(String name, String fileExtension) {
    Distribution(String name) {
        this.fileName = name;
        this.fileExtension = fileExtension;
    }

    public String getFileName() {
    public String getArtifactName() {
        return fileName;
    }

    public String getFileExtension() {
        return fileExtension;
        if (this.equals(INTEG_TEST)) {
            return "zip";
        } else {
            return OS.conditionalString()
                .onUnix(() -> "tar.gz")
                .onWindows(() -> "zip")
                .supply();
        }
    }

    public String getClassifier() {
        return OS.<String>conditional()
            .onLinux(() -> "linux-x86_64")
            .onWindows(() -> "windows-x86_64")
            .onMac(() -> "darwin-x86_64")
            .supply();
    }
}

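A note on the Distribution change above: the packaging is no longer hard-coded to zip but derived from the host OS via the new OS helper. A minimal sketch of the expected values, assuming a Linux build machine (illustrative only, not part of the commit):

    import org.elasticsearch.gradle.Distribution;

    class DistributionExample {
        public static void main(String[] args) {
            // INTEG_TEST keeps zip packaging on every platform
            System.out.println(Distribution.INTEG_TEST.getFileExtension()); // zip
            // DEFAULT and OSS switch to tar.gz on Linux/macOS and zip on Windows
            System.out.println(Distribution.OSS.getFileExtension());        // tar.gz on this assumed Linux host
            System.out.println(Distribution.OSS.getClassifier());           // linux-x86_64
        }
    }
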
@@ -0,0 +1,90 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.function.Supplier;

public enum OS {
    WINDOWS,
    MAC,
    LINUX;

    public static OS current() {
        String os = System.getProperty("os.name", "");
        if (os.startsWith("Windows")) {
            return OS.WINDOWS;
        }
        if (os.startsWith("Linux") || os.startsWith("LINUX")) {
            return OS.LINUX;
        }
        if (os.startsWith("Mac")) {
            return OS.MAC;
        }
        throw new IllegalStateException("Can't determine OS from: " + os);
    }

    public static class Conditional<T> {

        private final Map<OS, Supplier<T>> conditions = new HashMap<>();

        public Conditional<T> onWindows(Supplier<T> supplier) {
            conditions.put(WINDOWS, supplier);
            return this;
        }

        public Conditional<T> onLinux(Supplier<T> supplier) {
            conditions.put(LINUX, supplier);
            return this;
        }

        public Conditional<T> onMac(Supplier<T> supplier) {
            conditions.put(MAC, supplier);
            return this;
        }

        public Conditional<T> onUnix(Supplier<T> supplier) {
            conditions.put(MAC, supplier);
            conditions.put(LINUX, supplier);
            return this;
        }

        public T supply() {
            HashSet<OS> missingOS = new HashSet<>(Arrays.asList(OS.values()));
            missingOS.removeAll(conditions.keySet());
            if (missingOS.isEmpty() == false) {
                throw new IllegalArgumentException("No condition specified for " + missingOS);
            }
            return conditions.get(OS.current()).get();
        }

    }

    public static <T> Conditional<T> conditional() {
        return new Conditional<>();
    }

    public static Conditional<String> conditionalString() {
        return conditional();
    }

}

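The new OS helper replaces Gradle's internal OperatingSystem with a small builder: a supplier must be registered for every OS value (onUnix covers both LINUX and MAC) or supply() throws. A minimal usage sketch, mirroring how the rest of this change uses it (illustrative only):

    import org.elasticsearch.gradle.OS;

    class OSConditionalExample {
        public static void main(String[] args) {
            // Pick a platform-specific command; onUnix registers the supplier for LINUX and MAC
            String pluginScript = OS.conditionalString()
                .onUnix(() -> "./bin/elasticsearch-plugin")
                .onWindows(() -> "bin\\elasticsearch-plugin.bat")
                .supply();
            System.out.println(pluginScript);
            // Leaving any OS value without a supplier makes supply() throw IllegalArgumentException
        }
    }
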
@@ -20,10 +20,10 @@ package org.elasticsearch.gradle.testclusters;

import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.Version;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.internal.os.OperatingSystem;

import java.io.BufferedReader;
import java.io.File;
@@ -31,11 +31,14 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -65,7 +68,9 @@ public class ElasticsearchNode {
    private static final TimeUnit ES_DESTROY_TIMEOUT_UNIT = TimeUnit.SECONDS;
    private static final int NODE_UP_TIMEOUT = 30;
    private static final TimeUnit NODE_UP_TIMEOUT_UNIT = TimeUnit.SECONDS;

    private final LinkedHashMap<String, Predicate<ElasticsearchNode>> waitConditions;
    private final List<URI> plugins = new ArrayList<>();

    private final Path confPathRepo;
    private final Path configFile;
@@ -75,6 +80,7 @@ public class ElasticsearchNode {
    private final Path httpPortsFile;
    private final Path esStdoutFile;
    private final Path esStderrFile;
    private final Path tmpDir;

    private Distribution distribution;
    private String version;
@@ -96,6 +102,7 @@ public class ElasticsearchNode {
        httpPortsFile = confPathLogs.resolve("http.ports");
        esStdoutFile = confPathLogs.resolve("es.stdout.log");
        esStderrFile = confPathLogs.resolve("es.stderr.log");
        tmpDir = workingDir.resolve("tmp");
        this.waitConditions = new LinkedHashMap<>();
        waitConditions.put("http ports file", node -> Files.exists(node.httpPortsFile));
        waitConditions.put("transport ports file", node -> Files.exists(node.transportPortFile));
@@ -126,6 +133,16 @@ public class ElasticsearchNode {
        this.distribution = distribution;
    }

    public void plugin(URI plugin) {
        requireNonNull(plugin, "Plugin name can't be null");
        checkFrozen();
        this.plugins.add(plugin);
    }

    public void plugin(File plugin) {
        plugin(plugin.toURI());
    }

    public void freeze() {
        requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
        requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
@@ -166,12 +183,20 @@ public class ElasticsearchNode {
        });
    }

    /**
     * Returns a stream of lines in the generated logs similar to Files.lines
     *
     * @return stream of log lines
     */
    public Stream<String> logLines() throws IOException {
        return Files.lines(esStdoutFile, StandardCharsets.UTF_8);
    }

    synchronized void start() {
        logger.info("Starting `{}`", this);

        Path distroArtifact = artifactsExtractDir
            .resolve(distribution.getFileExtension())
            .resolve(distribution.getFileName() + "-" + getVersion());
            .resolve(distribution.getArtifactName() + "-" + getVersion());

        if (Files.exists(distroArtifact) == false) {
            throw new TestClustersException("Can not start " + this + ", missing: " + distroArtifact);
@@ -183,34 +208,79 @@ public class ElasticsearchNode {
            spec.from(distroArtifact.resolve("config").toFile());
            spec.into(configFile.getParent());
        });
        configure();
        startElasticsearchProcess(distroArtifact);

        try {
            createWorkingDir(distroArtifact);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        createConfiguration();

        plugins.forEach(plugin -> runElaticsearchBinScript(
            "elasticsearch-plugin",
            "install", "--batch", plugin.toString())
        );

        startElasticsearchProcess();
    }

    private void startElasticsearchProcess(Path distroArtifact) {
        logger.info("Running `bin/elasticsearch` in `{}` for {}", workingDir, this);
    private void runElaticsearchBinScript(String tool, String... args) {
        services.loggedExec(spec -> {
            spec.setEnvironment(getESEnvironment());
            spec.workingDir(workingDir);
            spec.executable(
                OS.conditionalString()
                    .onUnix(() -> "./bin/" + tool)
                    .onWindows(() -> "cmd")
                    .supply()
            );
            spec.args(
                OS.<List<String>>conditional()
                    .onWindows(() -> {
                        ArrayList<String> result = new ArrayList<>();
                        result.add("/c");
                        result.add("bin\\" + tool + ".bat");
                        for (String arg : args) {
                            result.add(arg);
                        }
                        return result;
                    })
                    .onUnix(() -> Arrays.asList(args))
                    .supply()
            );
        });
    }

    private Map<String, String> getESEnvironment() {
        Map<String, String> environment = new HashMap<>();
        environment.put("JAVA_HOME", getJavaHome().getAbsolutePath());
        environment.put("ES_PATH_CONF", configFile.getParent().toString());
        environment.put("ES_JAVA_OPTS", "-Xms512m -Xmx512m");
        environment.put("ES_TMPDIR", tmpDir.toString());
        // Windows requires this as it defaults to `c:\windows` despite ES_TMPDIR
        environment.put("TMP", tmpDir.toString());
        return environment;
    }

    private void startElasticsearchProcess() {
        final ProcessBuilder processBuilder = new ProcessBuilder();
        if (OperatingSystem.current().isWindows()) {
            processBuilder.command(
                "cmd", "/c",
                distroArtifact.resolve("\\bin\\elasticsearch.bat").toAbsolutePath().toString()
            );
        } else {
            processBuilder.command(
                distroArtifact.resolve("bin/elasticsearch").toAbsolutePath().toString()
            );
        }

        List<String> command = OS.<List<String>>conditional()
            .onUnix(() -> Arrays.asList("./bin/elasticsearch"))
            .onWindows(() -> Arrays.asList("cmd", "/c", "bin\\elasticsearch.bat"))
            .supply();
        processBuilder.command(command);
        processBuilder.directory(workingDir.toFile());
        Map<String, String> environment = processBuilder.environment();
        // Don't inherit anything from the environment for as that would lack reproducibility
        environment.clear();
        environment.putAll(getESEnvironment());
        // don't buffer all in memory, make sure we don't block on the default pipes
        processBuilder.redirectError(ProcessBuilder.Redirect.appendTo(esStderrFile.toFile()));
        processBuilder.redirectOutput(ProcessBuilder.Redirect.appendTo(esStdoutFile.toFile()));
        logger.info("Running `{}` in `{}` for {} env: {}", command, workingDir, this, environment);
        try {
            processBuilder.directory(workingDir.toFile());
            Map<String, String> environment = processBuilder.environment();
            // Don't inherit anything from the environment for as that would lack reproductability
            environment.clear();
            environment.put("JAVA_HOME", getJavaHome().getAbsolutePath());
            environment.put("ES_PATH_CONF", configFile.getParent().toAbsolutePath().toString());
            environment.put("ES_JAVA_OPTIONS", "-Xms512m -Xmx512m");
            // don't buffer all in memory, make sure we don't block on the default pipes
            processBuilder.redirectError(ProcessBuilder.Redirect.appendTo(esStderrFile.toFile()));
            processBuilder.redirectOutput(ProcessBuilder.Redirect.appendTo(esStdoutFile.toFile()));
            esProcess = processBuilder.start();
        } catch (IOException e) {
            throw new TestClustersException("Failed to start ES process for " + this, e);
@@ -311,16 +381,19 @@ public class ElasticsearchNode {
        }
    }

    private void configure() {
        try {
            Files.createDirectories(configFile.getParent());
            Files.createDirectories(confPathRepo);
            Files.createDirectories(confPathData);
            Files.createDirectories(confPathLogs);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    private void createWorkingDir(Path distroExtractDir) throws IOException {
        services.sync(spec -> {
            spec.from(distroExtractDir.toFile());
            spec.into(workingDir.toFile());
        });
        Files.createDirectories(configFile.getParent());
        Files.createDirectories(confPathRepo);
        Files.createDirectories(confPathData);
        Files.createDirectories(confPathLogs);
        Files.createDirectories(tmpDir);
    }

    private void createConfiguration() {
        LinkedHashMap<String, String> config = new LinkedHashMap<>();

        String nodeName = safeName(name);

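In the ElasticsearchNode change above, plugin(URI)/plugin(File) only record the plugin; installation happens in start(), which runs the elasticsearch-plugin script through runElaticsearchBinScript before the node process is launched. Roughly the command that gets assembled on each platform, sketched under the assumption that a single plugin URI was configured (the path below is made up):

    import java.net.URI;
    import java.util.Arrays;
    import java.util.List;

    class PluginInstallCommandExample {
        public static void main(String[] args) {
            URI plugin = URI.create("file:///path/to/dummy-plugin.zip"); // hypothetical plugin location
            // On Linux/macOS the script is executed directly from the node's working directory
            List<String> unix = Arrays.asList("./bin/elasticsearch-plugin", "install", "--batch", plugin.toString());
            // On Windows the .bat wrapper is run through cmd /c
            List<String> windows = Arrays.asList("cmd", "/c", "bin\\elasticsearch-plugin.bat", "install", "--batch", plugin.toString());
            System.out.println(unix);
            System.out.println(windows);
        }
    }
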
@@ -25,7 +25,6 @@ import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.component.ComponentArtifactIdentifier;
import org.gradle.api.execution.TaskActionListener;
import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.file.FileTree;
@@ -91,20 +90,6 @@ public class TestClustersPlugin implements Plugin<Project> {
            "Internal helper configuration used by cluster configuration to download " +
                "ES distributions and plugins."
        );
        helperConfiguration.getIncoming().afterResolve(resolvableDependencies -> {
            Set<ComponentArtifactIdentifier> nonZipComponents = resolvableDependencies.getArtifacts()
                .getArtifacts()
                .stream()
                .filter(artifact -> artifact.getFile().getName().endsWith(".zip") == false)
                .map(artifact -> artifact.getId())
                .collect(Collectors.toSet());

            if(nonZipComponents.isEmpty() == false) {
                throw new IllegalStateException("Dependencies with non-zip artifacts found in configuration '" +
                    TestClustersPlugin.HELPER_CONFIGURATION_NAME + "': " + nonZipComponents
                );
            }
        });

        // When running in the Daemon it's possible for this to hold references to past
        usedClusters.clear();
@@ -120,10 +105,18 @@ public class TestClustersPlugin implements Plugin<Project> {
            sync.from((Callable<List<FileTree>>) () ->
                helperConfiguration.getFiles()
                    .stream()
                    .map(project::zipTree)
                    .map(file -> {
                        if (file.getName().endsWith(".zip")) {
                            return project.zipTree(file);
                        } else if (file.getName().endsWith("tar.gz")) {
                            return project.tarTree(file);
                        } else {
                            throw new IllegalArgumentException("Can't extract " + file + " unknown file extension");
                        }
                    })
                    .collect(Collectors.toList())
            );
            sync.into(new File(getTestClustersConfigurationExtractDir(project), "zip"));
            sync.into(getTestClustersConfigurationExtractDir(project));
        });

        // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters
@@ -302,9 +295,11 @@ public class TestClustersPlugin implements Plugin<Project> {
        project.afterEvaluate(ip -> container.forEach(esNode -> {
            // declare dependencies against artifacts needed by cluster formation.
            String dependency = String.format(
                "org.elasticsearch.distribution.zip:%s:%s@zip",
                esNode.getDistribution().getFileName(),
                esNode.getVersion()
                "unused:%s:%s:%s@%s",
                esNode.getDistribution().getArtifactName(),
                esNode.getVersion(),
                esNode.getDistribution().getClassifier(),
                esNode.getDistribution().getFileExtension()
            );
            logger.info("Cluster {} depends on {}", esNode.getName(), dependency);
            rootProject.getDependencies().add(HELPER_CONFIGURATION_NAME, dependency);

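The helper configuration above now declares the distribution with an explicit classifier and file extension instead of always pulling a zip, so the extracted artifact matches the host OS. For illustration only, the notation built by that String.format call would look roughly like this, assuming the DEFAULT distribution on a Linux host and a placeholder version:

    class DependencyNotationExample {
        public static void main(String[] args) {
            // Mirrors the format string used in TestClustersPlugin; all argument values are assumptions
            String dependency = String.format(
                "unused:%s:%s:%s@%s",
                "elasticsearch",    // Distribution.DEFAULT.getArtifactName()
                "7.0.0",            // esNode.getVersion(), placeholder
                "linux-x86_64",     // Distribution.DEFAULT.getClassifier() on Linux
                "tar.gz"            // Distribution.DEFAULT.getFileExtension() on Linux
            );
            System.out.println(dependency); // unused:elasticsearch:7.0.0:linux-x86_64@tar.gz
        }
    }
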
@@ -21,6 +21,7 @@ package org.elasticsearch.gradle.testfixtures;
import com.avast.gradle.dockercompose.ComposeExtension;
import com.avast.gradle.dockercompose.DockerComposePlugin;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.precommit.JarHellTask;
import org.elasticsearch.gradle.precommit.TestingConventionsTasks;
import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask;
@@ -32,7 +33,6 @@ import org.gradle.api.plugins.BasePlugin;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.internal.os.OperatingSystem;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@@ -157,7 +157,7 @@ public class TestFixturesPlugin implements Plugin<Project> {

    @Input
    public boolean dockerComposeSupported(Project project) {
        if (OperatingSystem.current().isWindows()) {
        if (OS.current().equals(OS.WINDOWS)) {
            return false;
        }
        final boolean hasDockerCompose = project.file("/usr/local/bin/docker-compose").exists() ||

@@ -21,11 +21,9 @@ package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.Ignore;

import java.util.Arrays;

@Ignore // Awaiting a fix in https://github.com/elastic/elasticsearch/issues/37889.
public class TestClustersPluginIT extends GradleIntegrationTestCase {

    public void testListClusters() {
@@ -152,6 +150,14 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase {
        );
    }

    public void testPluginInstalled() {
        BuildResult result = getTestClustersRunner(":printLog").build();
        assertTaskSuccessful(result, ":printLog");
        assertStartedAndStoppedOnce(result);
        assertOutputContains(result.getOutput(), "-> Installed dummy");
        assertOutputContains(result.getOutput(), "loaded plugin [dummy]");
    }

    private void assertNotStarted(BuildResult result) {
        assertOutputDoesNotContain(
            result.getOutput(),
@@ -178,4 +184,6 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase {
            "Stopping `node{::myTestCluster}`"
        );
    }

}

@@ -5,6 +5,9 @@ plugins {

allprojects { all ->
    repositories {
        flatDir {
            dir System.getProperty("test.local-test-downloads-path")
        }
        maven {
            url System.getProperty("local.repo.path")
        }
@@ -14,6 +17,7 @@ allprojects { all ->
                url "http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision
            }
        }

        jcenter()
    }
@@ -22,9 +26,10 @@ allprojects { all ->

    all.testClusters {
        myTestCluster {
            distribution = 'ZIP'
            distribution = 'DEFAULT'
            version = System.getProperty("test.version_under_test")
            javaHome = file(System.getProperty('java.home'))
            plugin file("${project(":dummyPlugin").buildDir}/distributions/dummy-${version}.zip")
        }
    }
@@ -40,6 +45,19 @@ allprojects { all ->
            println "$path: Cluster running @ ${testClusters.myTestCluster.httpSocketURI}"
        }
    }
    syncTestClustersArtifacts {
        dependsOn ":dummyPlugin:bundlePlugin"
    }
    }
}

task printLog {
    useCluster testClusters.myTestCluster
    doFirst {
        println "$path: Cluster running @ ${testClusters.myTestCluster.httpSocketURI}"
        testClusters.myTestCluster.logLines().each {
            println it
        }
    }
}
@@ -77,6 +95,6 @@ task illegalConfigAlter {
    useCluster testClusters.myTestCluster
    doFirst {
        println "Going to alter configuration after use"
        testClusters.myTestCluster.distribution = 'ZIP_OSS'
        testClusters.myTestCluster.distribution = 'OSS'
    }
}

@@ -0,0 +1,11 @@
apply plugin: 'elasticsearch.esplugin'

version = System.getProperty("test.version_under_test")

esplugin {
    name 'dummy'
    description 'A dummy plugin used for testing'
    classname 'DummyPlugin'
    licenseFile rootProject.file('empty.txt')
    noticeFile rootProject.file('empty.txt')
}

@@ -0,0 +1,30 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.common.settings.Settings;

import java.nio.file.Path;

public class DummyPlugin extends Plugin {

    public DummyPlugin(final Settings settings, final Path configPath) {
    }

}

@@ -1,4 +1,5 @@
include 'dummyPlugin'
include ':alpha'
include ':bravo'
include ':charlie'
include ':charlie'
include 'dummyPlugin'