[Backport] Enable caching of rest tests which use integ-test distribution (#44181)

This commit is contained in:
Mark Vieira 2019-07-10 15:42:28 -07:00 committed by GitHub
parent 00b16e332d
commit 7c2e4b2857
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 795 additions and 163 deletions

View File

@ -60,7 +60,7 @@ class RestIntegTestTask extends DefaultTask {
Boolean includePackaged = false
RestIntegTestTask() {
runner = project.tasks.create("${name}Runner", Test.class)
runner = project.tasks.create("${name}Runner", RestTestRunnerTask.class)
super.dependsOn(runner)
clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
runner.dependsOn(clusterInit)
@ -78,10 +78,6 @@ class RestIntegTestTask extends DefaultTask {
runner.useCluster project.testClusters."$name"
}
// disable the build cache for rest test tasks
// there are a number of inputs we aren't properly tracking here so we'll just not cache these for now
runner.getOutputs().doNotCacheIf("Caching is disabled for REST integration tests", Specs.SATISFIES_ALL)
// override/add more for rest tests
runner.maxParallelForks = 1
runner.include('**/*IT.class')

View File

@ -0,0 +1,27 @@
package org.elasticsearch.gradle;
import java.util.List;
/**
 * Base type for collections whose entries are resolved lazily, deferring evaluation of
 * each value until it is actually read.
 */
public abstract class AbstractLazyPropertyCollection {

    // Human-readable collection name used in error messages (e.g. "Settings", "Keystore").
    final String name;
    // Optional object this collection belongs to, included in error messages for context.
    final Object owner;

    public AbstractLazyPropertyCollection(String name) {
        this(name, null);
    }

    public AbstractLazyPropertyCollection(String name, Object owner) {
        this.name = name;
        this.owner = owner;
    }

    /**
     * Returns the entries that should participate in task input snapshotting.
     */
    abstract List<? extends Object> getNormalizedCollection();

    /**
     * Fails fast with a descriptive {@link NullPointerException} when a lazily-resolved
     * value turns out to be null.
     */
    void assertNotNull(Object value, String description) {
        if (value != null) {
            return;
        }
        String message = name + " " + description + " was null";
        if (owner != null) {
            message += " when configuring " + owner;
        }
        throw new NullPointerException(message);
    }

}

View File

@ -0,0 +1,205 @@
package org.elasticsearch.gradle;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Nested;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/**
 * A {@link List} implementation whose elements are evaluated lazily through {@link Supplier}s.
 * Each entry carries a {@link PropertyNormalization} strategy so callers can exclude
 * individual values from Gradle task input snapshotting.
 *
 * <p>Values are resolved (and validated as non-null) at read time, not at insertion time.
 * Removal operations other than {@link #remove(int)} and {@link #clear()} are unsupported.
 */
public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implements List<T> {

    private final List<PropertyListEntry<T>> delegate = new ArrayList<>();

    public LazyPropertyList(String name) {
        super(name);
    }

    public LazyPropertyList(String name, Object owner) {
        super(name, owner);
    }

    @Override
    public int size() {
        return delegate.size();
    }

    @Override
    public boolean isEmpty() {
        return delegate.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return delegate.stream().anyMatch(entry -> entry.getValue().equals(o));
    }

    @Override
    public Iterator<T> iterator() {
        // Validation happens lazily as the iterator is consumed.
        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).iterator();
    }

    @Override
    public Object[] toArray() {
        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).toArray();
    }

    @Override
    public <T1> T1[] toArray(T1[] a) {
        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).toArray(a);
    }

    @Override
    public boolean add(T t) {
        return delegate.add(new PropertyListEntry<>(() -> t, PropertyNormalization.DEFAULT));
    }

    /**
     * Adds a lazily-evaluated element with {@link PropertyNormalization#DEFAULT} normalization.
     */
    public boolean add(Supplier<T> supplier) {
        return delegate.add(new PropertyListEntry<>(supplier, PropertyNormalization.DEFAULT));
    }

    /**
     * Adds a lazily-evaluated element with an explicit normalization strategy.
     */
    public boolean add(Supplier<T> supplier, PropertyNormalization normalization) {
        return delegate.add(new PropertyListEntry<>(supplier, normalization));
    }

    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException(this.getClass().getName() + " does not support remove()");
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toList()).containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends T> c) {
        // Per the List contract, return true only if this list actually changed.
        boolean changed = false;
        for (T item : c) {
            changed |= this.add(item);
        }
        return changed;
    }

    @Override
    public boolean addAll(int index, Collection<? extends T> c) {
        int i = index;
        for (T item : c) {
            this.add(i++, item);
        }
        // Per the List contract, an empty argument leaves the list unchanged.
        return c.isEmpty() == false;
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException(this.getClass().getName() + " does not support removeAll()");
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        throw new UnsupportedOperationException(this.getClass().getName() + " does not support retainAll()");
    }

    @Override
    public void clear() {
        delegate.clear();
    }

    @Override
    public T get(int index) {
        PropertyListEntry<T> entry = delegate.get(index);
        validate(entry);
        return entry.getValue();
    }

    @Override
    public T set(int index, T element) {
        return delegate.set(index, new PropertyListEntry<>(() -> element, PropertyNormalization.DEFAULT)).getValue();
    }

    @Override
    public void add(int index, T element) {
        delegate.add(index, new PropertyListEntry<>(() -> element, PropertyNormalization.DEFAULT));
    }

    @Override
    public T remove(int index) {
        return delegate.remove(index).getValue();
    }

    @Override
    public int indexOf(Object o) {
        for (int i = 0; i < delegate.size(); i++) {
            if (delegate.get(i).getValue().equals(o)) {
                return i;
            }
        }
        return -1;
    }

    @Override
    public int lastIndexOf(Object o) {
        int lastIndex = -1;
        for (int i = 0; i < delegate.size(); i++) {
            if (delegate.get(i).getValue().equals(o)) {
                lastIndex = i;
            }
        }
        return lastIndex;
    }

    @Override
    public ListIterator<T> listIterator() {
        // Validate eagerly here, consistent with listIterator(int) and subList().
        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).listIterator();
    }

    @Override
    public ListIterator<T> listIterator(int index) {
        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).listIterator(index);
    }

    @Override
    public List<T> subList(int fromIndex, int toIndex) {
        return delegate.stream()
            .peek(this::validate)
            .map(PropertyListEntry::getValue)
            .collect(Collectors.toList())
            .subList(fromIndex, toIndex);
    }

    /**
     * Entries whose normalization is not {@link PropertyNormalization#IGNORE_VALUE}; exposed
     * as a {@link Nested} input so Gradle snapshots the resolved values.
     */
    @Override
    @Nested
    List<? extends Object> getNormalizedCollection() {
        return delegate.stream()
            .peek(this::validate)
            .filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)
            .collect(Collectors.toList());
    }

    private void validate(PropertyListEntry<T> entry) {
        assertNotNull(entry.getValue(), "entry");
    }

    // Inner (non-static) so getValue() can use the enclosing collection's assertNotNull()
    // for contextual error messages. Type parameter renamed to V to avoid shadowing the
    // outer class's T.
    private class PropertyListEntry<V> {
        private final Supplier<V> supplier;
        private final PropertyNormalization normalization;

        PropertyListEntry(Supplier<V> supplier, PropertyNormalization normalization) {
            this.supplier = supplier;
            this.normalization = normalization;
        }

        public PropertyNormalization getNormalization() {
            return normalization;
        }

        @Input
        public V getValue() {
            assertNotNull(supplier, "supplier");
            return supplier.get();
        }
    }
}

View File

@ -0,0 +1,167 @@
package org.elasticsearch.gradle;
import org.gradle.api.Named;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Nested;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import java.util.stream.Collectors;
/**
 * A {@link Map} implementation whose values are evaluated lazily through {@link Supplier}s.
 * Each entry carries a {@link PropertyNormalization} strategy so callers can exclude
 * individual values from Gradle task input snapshotting.
 *
 * <p>Backed by a {@link LinkedHashMap}, so iteration over keys, values, and entries follows
 * insertion order. Values are resolved (and validated as non-null) at read time.
 */
public class LazyPropertyMap<K, V> extends AbstractLazyPropertyCollection implements Map<K, V> {

    private final Map<K, PropertyMapEntry<K, V>> delegate = new LinkedHashMap<>();
    // Optional mapper applied to entries when building the normalized (snapshotted) view.
    private final BiFunction<K, V, ?> normalizationMapper;

    public LazyPropertyMap(String name) {
        this(name, null);
    }

    public LazyPropertyMap(String name, Object owner) {
        this(name, owner, null);
    }

    public LazyPropertyMap(String name, Object owner, BiFunction<K, V, ?> normalizationMapper) {
        super(name, owner);
        this.normalizationMapper = normalizationMapper;
    }

    @Override
    public int size() {
        return delegate.size();
    }

    @Override
    public boolean isEmpty() {
        return delegate.isEmpty();
    }

    @Override
    public boolean containsKey(Object key) {
        return delegate.containsKey(key);
    }

    @Override
    public boolean containsValue(Object value) {
        return delegate.values().stream().map(PropertyMapEntry::getValue).anyMatch(v -> v.equals(value));
    }

    @Override
    public V get(Object key) {
        PropertyMapEntry<K, V> entry = delegate.get(key);
        if (entry != null) {
            V value = entry.getValue();
            assertNotNull(value, "value for key '" + key + "'");
            return value;
        } else {
            return null;
        }
    }

    @Override
    public V put(K key, V value) {
        return put(key, value, PropertyNormalization.DEFAULT);
    }

    /**
     * Puts an eagerly-evaluated value with an explicit normalization strategy.
     */
    public V put(K key, V value, PropertyNormalization normalization) {
        assertNotNull(value, "value for key '" + key + "'");
        return put(key, () -> value, normalization);
    }

    /**
     * Puts a lazily-evaluated value with {@link PropertyNormalization#DEFAULT} normalization.
     */
    public V put(K key, Supplier<V> supplier) {
        return put(key, supplier, PropertyNormalization.DEFAULT);
    }

    /**
     * Puts a lazily-evaluated value with an explicit normalization strategy.
     *
     * @return the previous resolved value for {@code key}, or null if there was none
     */
    public V put(K key, Supplier<V> supplier, PropertyNormalization normalization) {
        assertNotNull(supplier, "supplier for key '" + key + "'");
        PropertyMapEntry<K, V> previous = delegate.put(key, new PropertyMapEntry<>(key, supplier, normalization));
        return previous == null ? null : previous.getValue();
    }

    @Override
    public V remove(Object key) {
        PropertyMapEntry<K, V> previous = delegate.remove(key);
        return previous == null ? null : previous.getValue();
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> m) {
        throw new UnsupportedOperationException(this.getClass().getName() + " does not support putAll()");
    }

    @Override
    public void clear() {
        delegate.clear();
    }

    @Override
    public Set<K> keySet() {
        return delegate.keySet();
    }

    @Override
    public Collection<V> values() {
        return delegate.values().stream().peek(this::validate).map(PropertyMapEntry::getValue).collect(Collectors.toList());
    }

    @Override
    public Set<Entry<K, V>> entrySet() {
        // Collect into a LinkedHashMap to preserve the delegate's insertion order; the
        // default toMap() collector would return an unordered HashMap.
        return delegate.entrySet().stream()
            .peek(this::validate)
            .collect(Collectors.toMap(
                Entry::getKey,
                entry -> entry.getValue().getValue(),
                (a, b) -> b,
                LinkedHashMap::new))
            .entrySet();
    }

    /**
     * Entries whose normalization is not {@link PropertyNormalization#IGNORE_VALUE}, passed
     * through the normalization mapper (if any); exposed as a {@link Nested} input so Gradle
     * snapshots the resolved values.
     */
    @Override
    @Nested
    List<? extends Object> getNormalizedCollection() {
        return delegate.values().stream()
            .peek(this::validate)
            .filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)
            .map(entry -> normalizationMapper == null ? entry : normalizationMapper.apply(entry.getKey(), entry.getValue()))
            .collect(Collectors.toList());
    }

    private void validate(Map.Entry<K, PropertyMapEntry<K, V>> entry) {
        validate(entry.getValue());
    }

    private void validate(PropertyMapEntry<K, V> supplier) {
        assertNotNull(supplier, "key '" + supplier.getKey() + "' supplier value");
    }

    private static class PropertyMapEntry<K, V> implements Named {
        private final K key;
        private final Supplier<V> value;
        private final PropertyNormalization normalization;

        PropertyMapEntry(K key, Supplier<V> value, PropertyNormalization normalization) {
            this.key = key;
            this.value = value;
            this.normalization = normalization;
        }

        public PropertyNormalization getNormalization() {
            return normalization;
        }

        @Override
        public String getName() {
            return getKey().toString();
        }

        @Input
        public K getKey() {
            return key;
        }

        @Input
        public V getValue() {
            return value.get();
        }
    }
}

View File

@ -0,0 +1,13 @@
package org.elasticsearch.gradle;
/**
 * Controls how a lazily-evaluated property value is treated when Gradle snapshots task inputs.
 */
public enum PropertyNormalization {
/**
 * Uses default strategy based on runtime property type.
 */
DEFAULT,
/**
 * Ignores property value completely for the purposes of input snapshotting.
 */
IGNORE_VALUE
}

View File

@ -0,0 +1,37 @@
package org.elasticsearch.gradle.test;
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.testing.Test;
import java.util.ArrayList;
import java.util.Collection;
import static org.elasticsearch.gradle.Distribution.INTEG_TEST;
/**
* Customized version of Gradle {@link Test} task which tracks a collection of {@link ElasticsearchCluster} as a task input. We must do this
* as a custom task type because the current {@link org.gradle.api.tasks.TaskInputs} runtime API does not have a way to register
* {@link Nested} inputs.
*/
@CacheableTask
public class RestTestRunnerTask extends Test {
private Collection<ElasticsearchCluster> clusters = new ArrayList<>();
public RestTestRunnerTask() {
super();
this.getOutputs().doNotCacheIf("Build cache is only enabled for tests against clusters using the 'integ-test' distribution",
task -> clusters.stream().flatMap(c -> c.getNodes().stream()).anyMatch(n -> n.getDistribution() != INTEG_TEST));
}
@Nested
public Collection<ElasticsearchCluster> getClusters() {
return clusters;
}
public void testCluster(ElasticsearchCluster cluster) {
this.clusters.add(cluster);
}
}

View File

@ -18,15 +18,17 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.http.WaitForHttpResource;
import org.gradle.api.Named;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Nested;
import java.io.File;
import java.io.IOException;
@ -47,7 +49,7 @@ import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
public class ElasticsearchCluster implements TestClusterConfiguration {
public class ElasticsearchCluster implements TestClusterConfiguration, Named {
private static final Logger LOGGER = Logging.getLogger(ElasticsearchNode.class);
private static final int CLUSTER_UP_TIMEOUT = 40;
@ -60,19 +62,19 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
private final File workingDirBase;
private final File artifactsExtractDir;
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final GradleServicesAdapter services;
private final Project project;
public ElasticsearchCluster(String path, String clusterName, Project project, File artifactsExtractDir, File workingDirBase) {
this.path = path;
this.clusterName = clusterName;
this.project = project;
this.workingDirBase = workingDirBase;
this.artifactsExtractDir = artifactsExtractDir;
this.services = GradleServicesAdapter.getInstance(project);
this.nodes = project.container(ElasticsearchNode.class);
this.nodes.add(
new ElasticsearchNode(
path, clusterName + "-0",
services, artifactsExtractDir, workingDirBase
project, artifactsExtractDir, workingDirBase
)
);
// configure the cluster name eagerly so nodes know about it
@ -96,7 +98,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
for (int i = nodes.size() ; i < numberOfNodes; i++) {
this.nodes.add(new ElasticsearchNode(
path, clusterName + "-" + i, services, artifactsExtractDir, workingDirBase
path, clusterName + "-" + i, project, artifactsExtractDir, workingDirBase
));
}
}
@ -153,6 +155,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.keystore(key, value));
}
@Override
public void keystore(String key, File value, PropertyNormalization normalization) {
nodes.all(each -> each.keystore(key, value, normalization));
}
@Override
public void keystore(String key, FileSupplier valueSupplier) {
nodes.all(each -> each.keystore(key, valueSupplier));
@ -163,11 +170,21 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.setting(key, value));
}
@Override
public void setting(String key, String value, PropertyNormalization normalization) {
nodes.all(each -> each.setting(key, value, normalization));
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier) {
nodes.all(each -> each.setting(key, valueSupplier));
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
nodes.all(each -> each.setting(key, valueSupplier, normalization));
}
@Override
public void systemProperty(String key, String value) {
nodes.all(each -> each.systemProperty(key, value));
@ -178,6 +195,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(each -> each.systemProperty(key, valueSupplier));
}
@Override
public void systemProperty(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
nodes.all(each -> each.systemProperty(key, valueSupplier, normalization));
}
@Override
public void environment(String key, String value) {
nodes.all(each -> each.environment(key, value));
@ -189,13 +211,13 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
}
@Override
public void jvmArgs(String... values) {
nodes.all(each -> each.jvmArgs(values));
public void environment(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
nodes.all(each -> each.environment(key, valueSupplier, normalization));
}
@Override
public void jvmArgs(Supplier<String[]> valueSupplier) {
nodes.all(each -> each.jvmArgs(valueSupplier));
public void jvmArgs(String... values) {
nodes.all(each -> each.jvmArgs(values));
}
@Override
@ -246,6 +268,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
nodes.all(node -> node.extraConfigFile(destination, from));
}
@Override
public void extraConfigFile(String destination, File from, PropertyNormalization normalization) {
nodes.all(node -> node.extraConfigFile(destination, from, normalization));
}
@Override
public void user(Map<String, String> userSpec) {
nodes.all(node -> node.user(userSpec));
@ -356,6 +383,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration {
});
}
@Nested
public NamedDomainObjectContainer<ElasticsearchNode> getNodes() {
return nodes;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;

View File

@ -18,14 +18,29 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.GradleServicesAdapter;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.LazyPropertyList;
import org.elasticsearch.gradle.LazyPropertyMap;
import org.elasticsearch.gradle.LoggedExec;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.http.WaitForHttpResource;
import org.gradle.api.Action;
import org.gradle.api.Named;
import org.gradle.api.Project;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.util.PatternFilterable;
import java.io.ByteArrayInputStream;
import java.io.File;
@ -41,8 +56,8 @@ import java.nio.file.StandardOpenOption;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
@ -78,24 +93,23 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private final String path;
private final String name;
private final GradleServicesAdapter services;
private final Project project;
private final AtomicBoolean configurationFrozen = new AtomicBoolean(false);
private final Path artifactsExtractDir;
private final Path workingDir;
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final List<URI> plugins = new ArrayList<>();
private final List<File> modules = new ArrayList<>();
private final Map<String, Supplier<CharSequence>> settings = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> keystoreSettings = new LinkedHashMap<>();
private final Map<String, FileSupplier> keystoreFiles = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> systemProperties = new LinkedHashMap<>();
private final Map<String, Supplier<CharSequence>> environment = new LinkedHashMap<>();
private final List<Supplier<List<CharSequence>>> jvmArgs = new ArrayList<>();
private final Map<String, File> extraConfigFiles = new HashMap<>();
final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
private final LazyPropertyMap<String, CharSequence> settings = new LazyPropertyMap<>("Settings", this);
private final LazyPropertyMap<String, CharSequence> keystoreSettings = new LazyPropertyMap<>("Keystore", this);
private final LazyPropertyMap<String, File> keystoreFiles = new LazyPropertyMap<>("Keystore files", this, FileEntry::new);
private final LazyPropertyMap<String, CharSequence> systemProperties = new LazyPropertyMap<>("System properties", this);
private final LazyPropertyMap<String, CharSequence> environment = new LazyPropertyMap<>("Environment", this);
private final LazyPropertyList<CharSequence> jvmArgs = new LazyPropertyList<>("JVM arguments", this);
private final LazyPropertyMap<String, File> extraConfigFiles = new LazyPropertyMap<>("Extra config files", this, FileEntry::new);
private final List<Map<String, String>> credentials = new ArrayList<>();
final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
private final Path confPathRepo;
private final Path configFile;
@ -114,10 +128,10 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private Function<String, String> nameCustomization = Function.identity();
private boolean isWorkingDirConfigured = false;
ElasticsearchNode(String path, String name, GradleServicesAdapter services, File artifactsExtractDir, File workingDirBase) {
ElasticsearchNode(String path, String name, Project project, File artifactsExtractDir, File workingDirBase) {
this.path = path;
this.name = name;
this.services = services;
this.project = project;
this.artifactsExtractDir = artifactsExtractDir.toPath();
this.workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
confPathRepo = workingDir.resolve("repo");
@ -136,6 +150,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
return nameCustomization.apply(name);
}
@Input
public String getVersion() {
return version;
}
@ -147,6 +162,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
this.version = version;
}
@Input
public Distribution getDistribution() {
return distribution;
}
@ -177,88 +193,81 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
public void keystore(String key, String value) {
addSupplier("Keystore", keystoreSettings, key, value);
keystoreSettings.put(key, value);
}
@Override
public void keystore(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Keystore", keystoreSettings, key, valueSupplier);
keystoreSettings.put(key, valueSupplier);
}
@Override
public void keystore(String key, File value) {
requireNonNull(value, "keystore value was null when configuring test cluster`" + this + "`");
keystore(key, () -> value);
keystoreFiles.put(key, value);
}
@Override
public void keystore(String key, File value, PropertyNormalization normalization) {
keystoreFiles.put(key, value, normalization);
}
@Override
public void keystore(String key, FileSupplier valueSupplier) {
requireNonNull(key, "Keystore" + " key was null when configuring test cluster `" + this + "`");
requireNonNull(valueSupplier, "Keystore" + " value supplier was null when configuring test cluster `" + this + "`");
keystoreFiles.put(key, valueSupplier);
}
@Override
public void setting(String key, String value) {
addSupplier("Settings", settings, key, value);
settings.put(key, value);
}
@Override
public void setting(String key, String value, PropertyNormalization normalization) {
settings.put(key, value, normalization);
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Setting", settings, key, valueSupplier);
settings.put(key, valueSupplier);
}
@Override
public void setting(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
settings.put(key, valueSupplier, normalization);
}
@Override
public void systemProperty(String key, String value) {
addSupplier("Java System property", systemProperties, key, value);
systemProperties.put(key, value);
}
@Override
public void systemProperty(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Java System property", systemProperties, key, valueSupplier);
systemProperties.put(key, valueSupplier);
}
@Override
public void systemProperty(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
systemProperties.put(key, valueSupplier, normalization);
}
@Override
public void environment(String key, String value) {
addSupplier("Environment variable", environment, key, value);
environment.put(key, value);
}
@Override
public void environment(String key, Supplier<CharSequence> valueSupplier) {
addSupplier("Environment variable", environment, key, valueSupplier);
environment.put(key, valueSupplier);
}
@Override
public void environment(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization) {
environment.put(key, valueSupplier, normalization);
}
public void jvmArgs(String... values) {
for (String value : values) {
requireNonNull(value, "jvm argument was null when configuring test cluster `" + this + "`");
}
jvmArgs.add(() -> Arrays.asList(values));
}
public void jvmArgs(Supplier<String[]> valueSupplier) {
requireNonNull(valueSupplier, "jvm argument supplier was null when configuring test cluster `" + this + "`");
jvmArgs.add(() -> Arrays.asList(valueSupplier.get()));
}
private void addSupplier(String name, Map<String, Supplier<CharSequence>> collector, String key, Supplier<CharSequence> valueSupplier) {
requireNonNull(key, name + " key was null when configuring test cluster `" + this + "`");
requireNonNull(valueSupplier, name + " value supplier was null when configuring test cluster `" + this + "`");
collector.put(key, valueSupplier);
}
private void addSupplier(String name, Map<String, Supplier<CharSequence>> collector, String key, String actualValue) {
requireNonNull(actualValue, name + " value was null when configuring test cluster `" + this + "`");
addSupplier(name, collector, key, () -> actualValue);
}
private void checkSuppliers(String name, Collection<Supplier<CharSequence>> collector) {
collector.forEach(suplier ->
requireNonNull(
suplier.get().toString(),
name + " supplied value was null when configuring test cluster `" + this + "`"
)
);
jvmArgs.addAll(Arrays.asList(values));
}
public Path getConfigDir() {
@ -301,15 +310,11 @@ public class ElasticsearchNode implements TestClusterConfiguration {
public synchronized void start() {
LOGGER.info("Starting `{}`", this);
Path distroArtifact = artifactsExtractDir
.resolve(distribution.getGroup())
.resolve("elasticsearch-" + getVersion());
if (Files.exists(distroArtifact) == false) {
throw new TestClustersException("Can not start " + this + ", missing: " + distroArtifact);
if (Files.exists(getExtractedDistributionDir()) == false) {
throw new TestClustersException("Can not start " + this + ", missing: " + getExtractedDistributionDir());
}
if (Files.isDirectory(distroArtifact) == false) {
throw new TestClustersException("Can not start " + this + ", is not a directory: " + distroArtifact);
if (Files.isDirectory(getExtractedDistributionDir()) == false) {
throw new TestClustersException("Can not start " + this + ", is not a directory: " + getExtractedDistributionDir());
}
try {
@ -317,14 +322,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logToProcessStdout("Configuring working directory: " + workingDir);
// Only configure working dir once so we don't lose data on restarts
isWorkingDirConfigured = true;
createWorkingDir(distroArtifact);
createWorkingDir(getExtractedDistributionDir());
}
} catch (IOException e) {
throw new UncheckedIOException("Failed to create working directory for " + this, e);
}
createConfiguration();
if(plugins.isEmpty() == false) {
if (plugins.isEmpty() == false) {
logToProcessStdout("Installing " + plugins.size() + " plugins");
plugins.forEach(plugin -> runElaticsearchBinScript(
"elasticsearch-plugin",
@ -336,13 +341,12 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logToProcessStdout("Adding " + keystoreSettings.size() + " keystore settings and " + keystoreFiles.size() + " keystore files");
runElaticsearchBinScript("elasticsearch-keystore", "create");
checkSuppliers("Keystore", keystoreSettings.values());
keystoreSettings.forEach((key, value) ->
runElaticsearchBinScriptWithInput(value.get().toString(), "elasticsearch-keystore", "add", "-x", key)
runElaticsearchBinScriptWithInput(value.toString(), "elasticsearch-keystore", "add", "-x", key)
);
for (Map.Entry<String, FileSupplier> entry : keystoreFiles.entrySet()) {
File file = entry.getValue().get();
for (Map.Entry<String, File> entry : keystoreFiles.entrySet()) {
File file = entry.getValue();
requireNonNull(file, "supplied keystoreFile was null when configuring " + this);
if (file.exists() == false) {
throw new TestClustersException("supplied keystore file " + file + " does not exist, require for " + this);
@ -362,9 +366,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
credentials.forEach(paramMap -> runElaticsearchBinScript(
"elasticsearch-users",
paramMap.entrySet().stream()
.flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
.toArray(String[]::new)
paramMap.entrySet().stream()
.flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
.toArray(String[]::new)
));
}
@ -402,7 +406,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private boolean isSettingMissingOrTrue(String name) {
return Boolean.valueOf(settings.getOrDefault(name, () -> "false").get().toString());
return Boolean.valueOf(settings.getOrDefault(name, "false").toString());
}
private void copyExtraConfigFiles() {
@ -410,19 +414,19 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logToProcessStdout("Setting up " + extraConfigFiles.size() + " additional config files");
}
extraConfigFiles.forEach((destination, from) -> {
if (Files.exists(from.toPath()) == false) {
throw new TestClustersException("Can't create extra config file from " + from + " for " + this +
" as it does not exist");
}
Path dst = configFile.getParent().resolve(destination);
try {
Files.createDirectories(dst.getParent());
Files.copy(from.toPath(), dst, StandardCopyOption.REPLACE_EXISTING);
LOGGER.info("Added extra config file {} for {}", destination, this);
} catch (IOException e) {
throw new UncheckedIOException("Can't create extra config file for", e);
}
});
if (Files.exists(from.toPath()) == false) {
throw new TestClustersException("Can't create extra config file from " + from + " for " + this +
" as it does not exist");
}
Path dst = configFile.getParent().resolve(destination);
try {
Files.createDirectories(dst.getParent());
Files.copy(from.toPath(), dst, StandardCopyOption.REPLACE_EXISTING);
LOGGER.info("Added extra config file {} for {}", destination, this);
} catch (IOException e) {
throw new UncheckedIOException("Can't create extra config file for", e);
}
});
}
private void installModules() {
@ -433,9 +437,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// only install modules that are not already bundled with the integ-test distribution
if (Files.exists(destination) == false) {
services.copy(spec -> {
project.copy(spec -> {
if (module.getName().toLowerCase().endsWith(".zip")) {
spec.from(services.zipTree(module));
spec.from(project.zipTree(module));
} else if (module.isDirectory()) {
spec.from(module);
} else {
@ -460,6 +464,15 @@ public class ElasticsearchNode implements TestClusterConfiguration {
extraConfigFiles.put(destination, from);
}
@Override
public void extraConfigFile(String destination, File from, PropertyNormalization normalization) {
if (destination.contains("..")) {
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination +
" for " + this);
}
extraConfigFiles.put(destination, from, normalization);
}
@Override
public void user(Map<String, String> userSpec) {
Set<String> keys = new HashSet<>(userSpec.keySet());
@ -469,9 +482,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (keys.isEmpty() == false) {
throw new TestClustersException("Unknown keys in user definition " + keys + " for " + this);
}
Map<String,String> cred = new LinkedHashMap<>();
cred.put("useradd", userSpec.getOrDefault("username","test_user"));
cred.put("-p", userSpec.getOrDefault("password","x-pack-test-password"));
Map<String, String> cred = new LinkedHashMap<>();
cred.put("useradd", userSpec.getOrDefault("username", "test_user"));
cred.put("-p", userSpec.getOrDefault("password", "x-pack-test-password"));
cred.put("-r", userSpec.getOrDefault("role", "superuser"));
credentials.add(cred);
}
@ -485,7 +498,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
"Is this the distribution you expect it to be ?");
}
try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
services.loggedExec(spec -> {
LoggedExec.exec(project, spec -> {
spec.setEnvironment(getESEnvironment());
spec.workingDir(workingDir);
spec.executable(
@ -526,19 +539,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
defaultEnv.put("ES_PATH_CONF", configFile.getParent().toString());
String systemPropertiesString = "";
if (systemProperties.isEmpty() == false) {
checkSuppliers("Java System property", systemProperties.values());
systemPropertiesString = " " + systemProperties.entrySet().stream()
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue().get())
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue())
.collect(Collectors.joining(" "));
}
String jvmArgsString = "";
if (jvmArgs.isEmpty() == false) {
jvmArgsString = " " + jvmArgs.stream()
.map(Supplier::get)
.peek(charSequences -> requireNonNull(charSequences, "Jvm argument supplier returned null while configuring " + this))
.flatMap(Collection::stream)
.peek(argument -> {
requireNonNull(argument, "Jvm argument supplier returned null while configuring " + this);
if (argument.toString().startsWith("-D")) {
throw new TestClustersException("Invalid jvm argument `" + argument +
"` configure as systemProperty instead for " + this
@ -562,8 +570,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
);
}
checkSuppliers("Environment variable", environment.values());
environment.forEach((key, value) -> defaultEnv.put(key, value.get().toString()));
environment.forEach((key, value) -> defaultEnv.put(key, value.toString()));
return defaultEnv;
}
@ -687,7 +694,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private void logFileContents(String description, Path from) {
LOGGER.error("{} `{}`", description, this);
try(Stream<String> lines = Files.lines(from, StandardCharsets.UTF_8)) {
try (Stream<String> lines = Files.lines(from, StandardCharsets.UTF_8)) {
lines
.map(line -> " " + line)
.forEach(LOGGER::error);
@ -723,12 +730,12 @@ public class ElasticsearchNode implements TestClusterConfiguration {
 * We remove write permissions to make sure files are not mistakenly edited (e.g. the config file) and changes
* reflected across all copies. Permissions are retained to be able to replace the links.
*
* @param sourceRoot where to copy from
* @param sourceRoot where to copy from
* @param destinationRoot destination to link to
*/
private void syncWithLinks(Path sourceRoot, Path destinationRoot) {
if (Files.exists(destinationRoot)) {
services.delete(destinationRoot);
project.delete(destinationRoot);
}
try (Stream<Path> stream = Files.walk(sourceRoot)) {
@ -761,7 +768,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
}
private void createConfiguration() {
private void createConfiguration() {
String nodeName = nameCustomization.apply(safeName(name));
if (nodeName != null) {
defaultConfig.put("node.name", nodeName);
@ -790,24 +797,21 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
// can retry on circuit breaking exceptions, we can revert again to the default configuration.
if (Version.fromString(version).getMajor() >= 7) {
defaultConfig.put("indices.breaker.total.use_real_memory", "false");
defaultConfig.put("indices.breaker.total.use_real_memory", "false");
}
// Don't wait for state, just start up quickly. This will also allow new and old nodes in the BWC case to become the master
defaultConfig.put("discovery.initial_state_timeout", "0s");
defaultConfig.put("discovery.initial_state_timeout", "0s");
checkSuppliers("Settings", settings.values());
Map<String, String> userConfig = settings.entrySet().stream()
.collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue().get().toString()));
HashSet<String> overriden = new HashSet<>(defaultConfig.keySet());
overriden.retainAll(userConfig.keySet());
overriden.retainAll(settings.keySet());
overriden.removeAll(OVERRIDABLE_SETTINGS);
if (overriden.isEmpty() ==false) {
if (overriden.isEmpty() == false) {
throw new IllegalArgumentException(
"Testclusters does not allow the following settings to be changed:" + overriden + " for " + this
);
}
// Make sure no duplicate config keys
userConfig.keySet().stream()
settings.keySet().stream()
.filter(OVERRIDABLE_SETTINGS::contains)
.forEach(defaultConfig::remove);
@ -818,7 +822,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
Files.write(
configFile,
Stream.concat(
userConfig.entrySet().stream(),
settings.entrySet().stream(),
defaultConfig.entrySet().stream()
)
.map(entry -> entry.getKey() + ": " + entry.getValue())
@ -833,7 +837,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private void checkFrozen() {
if (configurationFrozen.get()) {
throw new IllegalStateException("Configuration for " + this + " can not be altered, already locked");
throw new IllegalStateException("Configuration for " + this + " can not be altered, already locked");
}
}
@ -858,11 +862,97 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private List<String> readPortsFile(Path file) throws IOException {
try(Stream<String> lines = Files.lines(file, StandardCharsets.UTF_8)) {
try (Stream<String> lines = Files.lines(file, StandardCharsets.UTF_8)) {
return lines.map(String::trim).collect(Collectors.toList());
}
}
private Path getExtractedDistributionDir() {
return artifactsExtractDir.resolve(distribution.getGroup()).resolve("elasticsearch-" + getVersion());
}
private List<File> getInstalledFileSet(Action<? super PatternFilterable> filter) {
return Stream.concat(
plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file")).map(File::new),
modules.stream()
)
.filter(File::exists)
// TODO: We may be able to simplify this with Gradle 5.6
// https://docs.gradle.org/nightly/release-notes.html#improved-handling-of-zip-archives-on-classpaths
.map(zipFile -> project.zipTree(zipFile).matching(filter))
.flatMap(tree -> tree.getFiles().stream())
.sorted(Comparator.comparing(File::getName))
.collect(Collectors.toList());
}
@Input
private Set<URI> getRemotePlugins() {
Set<URI> file = plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file") == false).collect(Collectors.toSet());
return file;
}
@Classpath
private List<File> getInstalledClasspath() {
return getInstalledFileSet(filter -> filter.include("**/*.jar"));
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private List<File> getInstalledFiles() {
return getInstalledFileSet(filter -> filter.exclude("**/*.jar"));
}
@Classpath
private List<File> getDistributionClasspath() {
ArrayList<File> files = new ArrayList<>(project.fileTree(getExtractedDistributionDir())
.matching(filter -> filter.include("**/*.jar"))
.getFiles());
files.sort(Comparator.comparing(File::getName));
return files;
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private FileCollection getDistributionFiles() {
return project.fileTree(getExtractedDistributionDir()).minus(project.files(getDistributionClasspath()));
}
@Nested
private Map<String, CharSequence> getKeystoreSettings() {
return keystoreSettings;
}
@Nested
private Map<String, File> getKeystoreFiles() {
return keystoreFiles;
}
@Nested
private Map<String, CharSequence> getSettings() {
return settings;
}
@Nested
private Map<String, CharSequence> getSystemProperties() {
return systemProperties;
}
@Nested
private Map<String, CharSequence> getEnvironment() {
return environment;
}
@Nested
private List<CharSequence> getJvmArgs() {
return jvmArgs;
}
@Nested
private Map<String, File> getExtraConfigFiles() {
return extraConfigFiles;
}
@Override
public boolean isProcessAlive() {
requireNonNull(
@ -882,9 +972,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
ADDITIONAL_CONFIG_TIMEOUT_UNIT.toMillis(ADDITIONAL_CONFIG_TIMEOUT *
(
plugins.size() +
keystoreFiles.size() +
keystoreSettings.size() +
credentials.size()
keystoreFiles.size() +
keystoreSettings.size() +
credentials.size()
)
),
TimeUnit.MILLISECONDS,
@ -911,6 +1001,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
return "node{" + path + ":" + name + "}";
}
@Input
List<Map<String, String>> getCredentials() {
return credentials;
}
@ -930,7 +1021,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
public boolean isHttpSslEnabled() {
return Boolean.valueOf(
settings.getOrDefault("xpack.security.http.ssl.enabled", () -> "false").get().toString()
settings.getOrDefault("xpack.security.http.ssl.enabled", "false").toString()
);
}
@ -938,28 +1029,50 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (settings.containsKey("xpack.security.http.ssl.certificate_authorities")) {
wait.setCertificateAuthorities(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.certificate_authorities").get().toString())
.resolve(settings.get("xpack.security.http.ssl.certificate_authorities").toString())
.toFile()
);
}
if (settings.containsKey("xpack.security.http.ssl.certificate")) {
wait.setCertificateAuthorities(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.certificate").get().toString())
.resolve(settings.get("xpack.security.http.ssl.certificate").toString())
.toFile()
);
}
if (settings.containsKey("xpack.security.http.ssl.keystore.path")) {
wait.setTrustStoreFile(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.keystore.path").get().toString())
.resolve(settings.get("xpack.security.http.ssl.keystore.path").toString())
.toFile()
);
}
if (keystoreSettings.containsKey("xpack.security.http.ssl.keystore.secure_password")) {
wait.setTrustStorePassword(
keystoreSettings.get("xpack.security.http.ssl.keystore.secure_password").get().toString()
keystoreSettings.get("xpack.security.http.ssl.keystore.secure_password").toString()
);
}
}
private static class FileEntry implements Named {
private String name;
private File file;
FileEntry(String name, File file) {
this.name = name;
this.file = file;
}
@Input
@Override
public String getName() {
return name;
}
@InputFile
@PathSensitive(PathSensitivity.NONE)
public File getFile() {
return file;
}
}
}

View File

@ -20,6 +20,7 @@ package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.Distribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.PropertyNormalization;
import org.gradle.api.logging.Logging;
import org.slf4j.Logger;
@ -52,23 +53,31 @@ public interface TestClusterConfiguration {
void keystore(String key, File value);
void keystore(String key, File value, PropertyNormalization normalization);
void keystore(String key, FileSupplier valueSupplier);
void setting(String key, String value);
void setting(String key, String value, PropertyNormalization normalization);
void setting(String key, Supplier<CharSequence> valueSupplier);
void setting(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization);
void systemProperty(String key, String value);
void systemProperty(String key, Supplier<CharSequence> valueSupplier);
void systemProperty(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization);
void environment(String key, String value);
void environment(String key, Supplier<CharSequence> valueSupplier);
void jvmArgs(String... values);
void environment(String key, Supplier<CharSequence> valueSupplier, PropertyNormalization normalization);
void jvmArgs(Supplier<String[]> valueSupplier);
void jvmArgs(String... values);
void freeze();
@ -80,6 +89,8 @@ public interface TestClusterConfiguration {
void extraConfigFile(String destination, File from);
void extraConfigFile(String destination, File from, PropertyNormalization normalization);
void user(Map<String, String> userSpec);
String getHttpSocketURI();
@ -158,7 +169,5 @@ public interface TestClusterConfiguration {
.replaceAll("[^a-zA-Z0-9]+", "-");
}
boolean isProcessAlive();
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.gradle.testclusters;
import groovy.lang.Closure;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.test.RestTestRunnerTask;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
@ -155,6 +156,9 @@ public class TestClustersPlugin implements Plugin<Project> {
((Task) thisObject).dependsOn(
project.getRootProject().getTasks().getByName(SYNC_ARTIFACTS_TASK_NAME)
);
if (thisObject instanceof RestTestRunnerTask) {
((RestTestRunnerTask) thisObject).testCluster(cluster);
}
}
})
);

View File

@ -22,6 +22,8 @@ import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture
import org.elasticsearch.gradle.test.RestIntegTestTask
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
@ -74,9 +76,9 @@ integTest.enabled = false
setting 'discovery.seed_providers', 'ec2'
setting 'network.host', '_ec2_'
setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" }
setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" }
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
}
}
@ -103,7 +105,7 @@ ec2FixtureContainerCredentials.env 'ACTIVATE_CONTAINER_CREDENTIALS', true
testClusters.integTestContainerCredentials {
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI',
{ "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }
{ "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE
}
// Extra config for InstanceProfile

View File

@ -17,9 +17,12 @@
* under the License.
*/
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
@ -54,13 +57,13 @@ testClusters.integTest {
numberOfNodes = gceNumberOfNodes
plugin file(project(':plugins:discovery-gce').bundlePlugin.archiveFile)
// use gce fixture for Auth calls instead of http://metadata.google.internal
environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }
environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// allows to configure hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
systemProperty 'es.allow_reroute_gce_settings', 'true'
setting 'discovery.seed_providers', 'gce'
// use gce fixture for metadata server calls instead of http://metadata.google.internal
setting 'cloud.gce.host', { "http://${gceFixture.addressAndPort}" }
setting 'cloud.gce.host', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// use gce fixture for API calls instead of https://www.googleapis.com
setting 'cloud.gce.root_url', { "http://${gceFixture.addressAndPort}" }
setting 'cloud.gce.root_url', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
}

View File

@ -17,9 +17,13 @@
* under the License.
*/
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture
import static org.elasticsearch.gradle.PropertyNormalization.DEFAULT
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
@ -79,9 +83,9 @@ testClusters.integTest {
// Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used
// in a hacky way to change the protocol and endpoint. We must fix that.
setting 'azure.client.integration_test.endpoint_suffix',
{ "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=http://${azureStorageFixture.addressAndPort }" }
{ "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=http://${azureStorageFixture.addressAndPort }" }, IGNORE_VALUE
String firstPartOfSeed = project.rootProject.testSeed.tokenize(':').get(0)
setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString()
setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), System.getProperty('ignore.tests.seed') == null ? DEFAULT : IGNORE_VALUE
} else {
println "Using an external service to test the repository-azure plugin"
}

View File

@ -17,12 +17,15 @@
* under the License.
*/
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture
import java.security.KeyPair
import java.security.KeyPairGenerator
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
@ -90,13 +93,13 @@ integTest {
testClusters.integTest {
plugin file(project(':plugins:repository-gcs').bundlePlugin.archiveFile)
keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile
keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE
if (useFixture) {
tasks.integTest.dependsOn createServiceAccountFile, googleCloudStorageFixture
/* Use a closure on the string to delay evaluation until tests are executed */
setting 'gcs.client.integration_test.endpoint', { "http://${googleCloudStorageFixture.addressAndPort}" }
setting 'gcs.client.integration_test.token_uri', { "http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token" }
setting 'gcs.client.integration_test.endpoint', { "http://${googleCloudStorageFixture.addressAndPort}" }, IGNORE_VALUE
setting 'gcs.client.integration_test.token_uri', { "http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token" }, IGNORE_VALUE
} else {
println "Using an external service to test the repository-gcs plugin"
}

View File

@ -24,6 +24,8 @@ import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'elasticsearch.test.fixtures'
esplugin {
@ -66,7 +68,14 @@ dependencies {
hdfsFixture project(':test:fixtures:hdfs-fixture')
// Set the keytab files in the classpath so that we can access them from test code without the security manager
// freaking out.
testRuntime fileTree(dir: project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent, include: ['*.keytab'])
testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent)
}
normalization {
runtimeClasspath {
// ignore generated keytab files for the purposes of build avoidance
ignore '*.keytab'
}
}
dependencyLicenses {
@ -154,7 +163,7 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
systemProperty "java.security.krb5.conf", krb5conf
extraConfigFile(
"repository-hdfs/krb5.keytab",
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}")
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
)
}
}

View File

@ -1,8 +1,9 @@
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture
import org.elasticsearch.gradle.test.RestIntegTestTask
import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
@ -168,6 +169,13 @@ if (useFixture) {
File minioAddressFile = new File(project.buildDir, 'generated-resources/s3Fixture.address')
normalization {
runtimeClasspath {
// ignore generated address file for the purposes of build avoidance
ignore 's3Fixture.address'
}
}
thirdPartyTest {
dependsOn tasks.bundlePlugin, tasks.postProcessFixture
outputs.file(minioAddressFile)
@ -195,7 +203,7 @@ if (useFixture) {
testClusters.integTestMinio {
keystore 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey
keystore 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey
setting 's3.client.integration_test_permanent.endpoint', minioAddress
setting 's3.client.integration_test_permanent.endpoint', minioAddress, IGNORE_VALUE
plugin file(tasks.bundlePlugin.archiveFile)
}
@ -275,12 +283,12 @@ testClusters.integTest {
keystore 's3.client.integration_test_temporary.session_token', s3TemporarySessionToken
if (useFixture) {
setting 's3.client.integration_test_permanent.endpoint', { "http://${s3Fixture.addressAndPort}" }
setting 's3.client.integration_test_temporary.endpoint', { "http://${s3Fixture.addressAndPort}" }
setting 's3.client.integration_test_ec2.endpoint', { "http://${s3Fixture.addressAndPort}" }
setting 's3.client.integration_test_permanent.endpoint', { "http://${s3Fixture.addressAndPort}" }, IGNORE_VALUE
setting 's3.client.integration_test_temporary.endpoint', { "http://${s3Fixture.addressAndPort}" }, IGNORE_VALUE
setting 's3.client.integration_test_ec2.endpoint', { "http://${s3Fixture.addressAndPort}" }, IGNORE_VALUE
// to redirect InstanceProfileCredentialsProvider to custom auth point
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${s3Fixture.addressAndPort}" }
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${s3Fixture.addressAndPort}" }, IGNORE_VALUE
} else {
println "Using an external service to test the repository-s3 plugin"
}
@ -302,9 +310,9 @@ if (useFixture) {
check.dependsOn(integTestECS)
testClusters.integTestECS {
setting 's3.client.integration_test_ecs.endpoint', { "http://${s3Fixture.addressAndPort}" }
setting 's3.client.integration_test_ecs.endpoint', { "http://${s3Fixture.addressAndPort}" }, IGNORE_VALUE
plugin file(tasks.bundlePlugin.archiveFile)
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', { "http://${s3Fixture.addressAndPort}/ecs_credentials_endpoint" }
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', { "http://${s3Fixture.addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE
}
}