Auto-format buildSrc (#51043)

Backport / reimplementation of #50786 on 7.x.

Opt `buildSrc` in to automatic formatting. This required a config tweak
to pick up all the Java sources; as a result, more files that were
previously missed are now found in the Enrich plugin.

I also moved the two Java files in `buildSrc/src/main/groovy` into the Java
source directory, which required some follow-up changes.
Rory Hunter 2020-01-16 10:26:27 +00:00 committed by GitHub
parent 02dfd71efa
commit 80d925e225
93 changed files with 2119 additions and 1664 deletions

build.gradle

@@ -106,6 +106,7 @@ subprojects {
// is greater than the number of unformatted projects, this can be
// switched to an exclude list, and eventually removed completely.
def projectPathsToFormat = [
':build-tools',
':x-pack:plugin:enrich'
]
@ -114,6 +115,10 @@ subprojects {
spotless {
java {
// Normally this isn't necessary, but we have Java sources in
// non-standard places
target '**/*.java'
removeUnusedImports()
eclipse().configFile rootProject.file('.eclipseformat.xml')
trimTrailingWhitespace()
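
A note on usage, which the diff itself doesn't spell out: Spotless wires per-project `spotlessCheck` and `spotlessApply` tasks for every project in the opt-in list, so running something like `./gradlew :build-tools:spotlessApply` should reformat these sources in place, while `spotlessCheck` fails the build when formatting drifts. (Task names per the Spotless Gradle plugin; the exact invocation is an assumption, not part of this commit.)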

Reaper.java

@@ -65,14 +65,14 @@ public class Reaper implements Closeable {
}
Path inputDir = Paths.get(args[0]);
try (Reaper reaper = new Reaper(inputDir)){
try (Reaper reaper = new Reaper(inputDir)) {
System.in.read();
reaper.reap();
}
}
private void reap() {
try (Stream<Path> stream = Files.list(inputDir)){
try (Stream<Path> stream = Files.list(inputDir)) {
final List<Path> inputFiles = stream.filter(p -> p.getFileName().toString().endsWith(".cmd")).collect(Collectors.toList());
for (Path inputFile : inputFiles) {
@@ -118,7 +118,7 @@ public class Reaper implements Closeable {
@Override
public void close() {
if (failed == false) {
try (Stream<Path> stream = Files.walk(inputDir)){
try (Stream<Path> stream = Files.walk(inputDir)) {
stream.sorted(Comparator.reverseOrder()).forEach(this::delete);
} catch (IOException e) {
throw new UncheckedIOException(e);

BwcVersions.java

@@ -109,15 +109,20 @@ public class BwcVersions {
}
protected BwcVersions(List<String> versionLines, Version currentVersionProperty) {
this(versionLines.stream()
.map(LINE_PATTERN::matcher)
.filter(Matcher::matches)
.map(match -> new Version(
Integer.parseInt(match.group(1)),
Integer.parseInt(match.group(2)),
Integer.parseInt(match.group(3))
))
.collect(Collectors.toCollection(TreeSet::new)), currentVersionProperty);
this(
versionLines.stream()
.map(LINE_PATTERN::matcher)
.filter(Matcher::matches)
.map(
match -> new Version(
Integer.parseInt(match.group(1)),
Integer.parseInt(match.group(2)),
Integer.parseInt(match.group(3))
)
)
.collect(Collectors.toCollection(TreeSet::new)),
currentVersionProperty
);
}
// for testkit tests, until BwcVersions is extracted into an extension
@@ -140,8 +145,10 @@ public class BwcVersions {
Map<Version, UnreleasedVersionInfo> unreleased = new HashMap<>();
for (Version unreleasedVersion : getUnreleased()) {
unreleased.put(unreleasedVersion,
new UnreleasedVersionInfo(unreleasedVersion, getBranchFor(unreleasedVersion), getGradleProjectPathFor(unreleasedVersion)));
unreleased.put(
unreleasedVersion,
new UnreleasedVersionInfo(unreleasedVersion, getBranchFor(unreleasedVersion), getGradleProjectPathFor(unreleasedVersion))
);
}
this.unreleased = Collections.unmodifiableMap(unreleased);
}
@@ -149,18 +156,18 @@ public class BwcVersions {
private void assertNoOlderThanTwoMajors() {
Set<Integer> majors = groupByMajor.keySet();
if (majors.size() != 2 && currentVersion.getMinor() != 0 && currentVersion.getRevision() != 0) {
throw new IllegalStateException(
"Expected exactly 2 majors in parsed versions but found: " + majors
);
throw new IllegalStateException("Expected exactly 2 majors in parsed versions but found: " + majors);
}
}
private void assertCurrentVersionMatchesParsed(Version currentVersionProperty) {
if (currentVersionProperty.equals(currentVersion) == false) {
throw new IllegalStateException(
"Parsed versions latest version does not match the one configured in build properties. " +
"Parsed latest version is " + currentVersion + " but the build has " +
currentVersionProperty
"Parsed versions latest version does not match the one configured in build properties. "
+ "Parsed latest version is "
+ currentVersion
+ " but the build has "
+ currentVersionProperty
);
}
}
@@ -175,12 +182,7 @@ public class BwcVersions {
public void forPreviousUnreleased(Consumer<UnreleasedVersionInfo> consumer) {
List<UnreleasedVersionInfo> collect = getUnreleased().stream()
.filter(version -> version.equals(currentVersion) == false)
.map(version -> new UnreleasedVersionInfo(
version,
getBranchFor(version),
getGradleProjectPathFor(version)
)
)
.map(version -> new UnreleasedVersionInfo(version, getBranchFor(version), getGradleProjectPathFor(version)))
.collect(Collectors.toList());
collect.forEach(uvi -> consumer.accept(uvi));
@@ -196,22 +198,18 @@ public class BwcVersions {
Map<Integer, List<Version>> releasedMajorGroupedByMinor = getReleasedMajorGroupedByMinor();
if (version.getRevision() == 0) {
List<Version> unreleasedStagedOrMinor = getUnreleased().stream()
.filter(v -> v.getRevision() == 0)
.collect(Collectors.toList());
List<Version> unreleasedStagedOrMinor = getUnreleased().stream().filter(v -> v.getRevision() == 0).collect(Collectors.toList());
if (unreleasedStagedOrMinor.size() > 2) {
if (unreleasedStagedOrMinor.get(unreleasedStagedOrMinor.size() - 2).equals(version)) {
return ":distribution:bwc:minor";
} else{
} else {
return ":distribution:bwc:staged";
}
} else {
return ":distribution:bwc:minor";
}
} else {
if (releasedMajorGroupedByMinor
.getOrDefault(version.getMinor(), emptyList())
.contains(version)) {
if (releasedMajorGroupedByMinor.getOrDefault(version.getMinor(), emptyList()).contains(version)) {
return ":distribution:bwc:bugfix";
} else {
return ":distribution:bwc:maintenance";
@@ -229,7 +227,7 @@ public class BwcVersions {
return "master";
case ":distribution:bwc:minor":
// The .x branch will always point to the latest minor (for that major), so a "minor" project will be on the .x branch
// unless there is more recent (higher) minor.
// unless there is more recent (higher) minor.
final Version latestInMajor = getLatestVersionByKey(groupByMajor, version.getMajor());
if (latestInMajor.getMinor() == version.getMinor()) {
return version.getMajor() + ".x";
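
An aside on the branch rule restated in the comment above, as a standalone sketch (hypothetical helper, not part of the class): the latest minor of a major maps to the `<major>.x` branch, and any older minor maps to its own `<major>.<minor>` branch.

class BwcBranchRule {
    static String branchFor(int major, int minor, int latestMinorInMajor) {
        // latest minor of the major lives on "<major>.x"; older minors on "<major>.<minor>"
        return minor == latestMinorInMajor ? major + ".x" : major + "." + minor;
    }

    public static void main(String[] args) {
        System.out.println(branchFor(7, 6, 6)); // "7.x" -- latest minor of the 7 series
        System.out.println(branchFor(7, 5, 6)); // "7.5" -- an older minor
    }
}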
@@ -280,23 +278,16 @@ public class BwcVersions {
}
}
return unmodifiableList(
unreleased.stream()
.sorted()
.distinct()
.collect(Collectors.toList())
);
return unmodifiableList(unreleased.stream().sorted().distinct().collect(Collectors.toList()));
}
private Version getLatestInMinor(int major, int minor) {
return groupByMajor.get(major).stream()
.filter(v -> v.getMinor() == minor)
.max(Version::compareTo)
.orElse(null);
return groupByMajor.get(major).stream().filter(v -> v.getMinor() == minor).max(Version::compareTo).orElse(null);
}
private Version getLatestVersionByKey(Map<Integer, List<Version>> groupByMajor, int key) {
return groupByMajor.getOrDefault(key, emptyList()).stream()
return groupByMajor.getOrDefault(key, emptyList())
.stream()
.max(Version::compareTo)
.orElseThrow(() -> new IllegalStateException("Unexpected number of versions in collection"));
}
@@ -308,11 +299,9 @@ public class BwcVersions {
final Map<Integer, List<Version>> groupByMinor;
if (currentMajorVersions.size() == 1) {
// Current is an unreleased major: x.0.0 so we have to look for other unreleased versions in the previous major
groupByMinor = previousMajorVersions.stream()
.collect(Collectors.groupingBy(Version::getMinor, Collectors.toList()));
groupByMinor = previousMajorVersions.stream().collect(Collectors.groupingBy(Version::getMinor, Collectors.toList()));
} else {
groupByMinor = currentMajorVersions.stream()
.collect(Collectors.groupingBy(Version::getMinor, Collectors.toList()));
groupByMinor = currentMajorVersions.stream().collect(Collectors.groupingBy(Version::getMinor, Collectors.toList()));
}
return groupByMinor;
}
@@ -322,8 +311,9 @@ public class BwcVersions {
notReallyReleased.removeAll(authoritativeReleasedVersions);
if (notReallyReleased.isEmpty() == false) {
throw new IllegalStateException(
"out-of-date released versions" +
"\nFollowing versions are not really released, but the build thinks they are: " + notReallyReleased
"out-of-date released versions"
+ "\nFollowing versions are not really released, but the build thinks they are: "
+ notReallyReleased
);
}
@@ -331,17 +321,19 @@ public class BwcVersions {
incorrectlyConsideredUnreleased.retainAll(getUnreleased());
if (incorrectlyConsideredUnreleased.isEmpty() == false) {
throw new IllegalStateException(
"out-of-date released versions" +
"\nBuild considers versions unreleased, " +
"but they are released according to an authoritative source: " + incorrectlyConsideredUnreleased +
"\nThe next versions probably needs to be added to Version.java (CURRENT doesn't count)."
"out-of-date released versions"
+ "\nBuild considers versions unreleased, "
+ "but they are released according to an authoritative source: "
+ incorrectlyConsideredUnreleased
+ "\nThe next versions probably needs to be added to Version.java (CURRENT doesn't count)."
);
}
}
private List<Version> getReleased() {
List<Version> unreleased = getUnreleased();
return groupByMajor.values().stream()
return groupByMajor.values()
.stream()
.flatMap(Collection::stream)
.filter(each -> unreleased.contains(each) == false)
.collect(Collectors.toList());
@@ -349,10 +341,7 @@ public class BwcVersions {
public List<Version> getIndexCompatible() {
return unmodifiableList(
Stream.concat(
groupByMajor.get(currentVersion.getMajor() - 1).stream(),
groupByMajor.get(currentVersion.getMajor()).stream()
)
Stream.concat(groupByMajor.get(currentVersion.getMajor() - 1).stream(), groupByMajor.get(currentVersion.getMajor()).stream())
.filter(version -> version.equals(currentVersion) == false)
.collect(Collectors.toList())
);
@@ -364,10 +353,7 @@ public class BwcVersions {
List<Version> prevMajors = groupByMajor.get(currentVersion.getMajor() - 1);
int minor = prevMajors.get(prevMajors.size() - 1).getMinor();
for (int i = prevMajors.size() - 1;
i > 0 && prevMajors.get(i).getMinor() == minor;
i--
) {
for (int i = prevMajors.size() - 1; i > 0 && prevMajors.get(i).getMinor() == minor; i--) {
wireCompat.add(prevMajors.get(i));
}
wireCompat.addAll(groupByMajor.get(currentVersion.getMajor()));

ConcatFilesTask.java

@@ -55,7 +55,9 @@ public class ConcatFilesTask extends DefaultTask {
}
@InputFiles
public FileTree getFiles() { return files; }
public FileTree getFiles() {
return files;
}
public void setHeaderLine(String headerLine) {
this.headerLine = headerLine;
@@ -63,7 +65,9 @@ public class ConcatFilesTask extends DefaultTask {
@Input
@Optional
public String getHeaderLine() { return headerLine; }
public String getHeaderLine() {
return headerLine;
}
public void setTarget(File target) {
this.target = target;
@@ -77,10 +81,7 @@ public class ConcatFilesTask extends DefaultTask {
@TaskAction
public void concatFiles() throws IOException {
if (getHeaderLine() != null) {
Files.write(
getTarget().toPath(),
(getHeaderLine() + '\n').getBytes(StandardCharsets.UTF_8)
);
Files.write(getTarget().toPath(), (getHeaderLine() + '\n').getBytes(StandardCharsets.UTF_8));
}
// To remove duplicate lines
@@ -88,9 +89,7 @@ public class ConcatFilesTask extends DefaultTask {
for (File f : getFiles()) {
uniqueLines.addAll(Files.readAllLines(f.toPath(), StandardCharsets.UTF_8));
}
Files.write(
getTarget().toPath(), uniqueLines, StandardCharsets.UTF_8, StandardOpenOption.APPEND
);
Files.write(getTarget().toPath(), uniqueLines, StandardCharsets.UTF_8, StandardOpenOption.APPEND);
}
}
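
The `uniqueLines` collection is declared outside this hunk; judging from the comment above it is presumably an ordered set. A self-contained sketch of the dedup-then-append step under that assumption:

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.LinkedHashSet;
import java.util.Set;

class ConcatUnique {
    public static void main(String[] args) throws Exception {
        // a LinkedHashSet keeps first-seen order while dropping duplicate lines
        Set<String> uniqueLines = new LinkedHashSet<>();
        for (String arg : args) {
            uniqueLines.addAll(Files.readAllLines(Paths.get(arg), StandardCharsets.UTF_8));
        }
        Path target = Paths.get("target.txt"); // hypothetical output file
        Files.write(target, uniqueLines, StandardCharsets.UTF_8, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    }
}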

DistributionDownloadPlugin.java

@@ -107,8 +107,10 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
if (distribution.getType().shouldExtract()) {
// for the distribution extracted, add a root level task that does the extraction, and depend on that
// extracted configuration as an artifact consisting of the extracted distribution directory
dependencies.add(distribution.getExtracted().configuration.getName(),
projectDependency(project, ":", configName("extracted_elasticsearch", distribution)));
dependencies.add(
distribution.getExtracted().configuration.getName(),
projectDependency(project, ":", configName("extracted_elasticsearch", distribution))
);
// ensure a root level download task exists
setupRootDownload(project.getRootProject(), distribution);
}
@@ -141,7 +143,7 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
TaskProvider<Sync> extractTask = rootProject.getTasks().register(extractTaskName, Sync.class, syncTask -> {
syncTask.dependsOn(downloadConfig);
syncTask.into(extractDir);
syncTask.from((Callable<FileTree>)() -> {
syncTask.from((Callable<FileTree>) () -> {
File archiveFile = archiveGetter.get();
String archivePath = archiveFile.toString();
if (archivePath.endsWith(".zip")) {
@@ -160,9 +162,12 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
}
});
});
rootProject.getArtifacts().add(extractedConfigName,
rootProject.getLayout().getProjectDirectory().dir(extractDir),
artifact -> artifact.builtBy(extractTask));
rootProject.getArtifacts()
.add(
extractedConfigName,
rootProject.getLayout().getProjectDirectory().dir(extractDir),
artifact -> artifact.builtBy(extractTask)
);
}
}
@@ -229,7 +234,6 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip";
}
Version distroVersion = Version.fromString(distribution.getVersion());
String extension = distribution.getType().toString();
String classifier = ":x86_64";
@@ -302,7 +306,7 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
Platform platform = distribution.getPlatform();
projectName += platform.toString() + (platform == Platform.WINDOWS ? "-zip" : "-tar");
} else {
projectName = distribution.getFlavor().equals(Flavor.DEFAULT) ?"zip" : "oss-zip";
projectName = distribution.getFlavor().equals(Flavor.DEFAULT) ? "zip" : "oss-zip";
}
} else if (distribution.getType() == Type.DOCKER) {
projectName += "docker-export";
@@ -313,9 +317,15 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
}
private static String configName(String prefix, ElasticsearchDistribution distribution) {
return prefix + "_" + distribution.getVersion() + "_" + distribution.getType() + "_" +
(distribution.getPlatform() == null ? "" : distribution.getPlatform() + "_")
+ distribution.getFlavor() + (distribution.getBundledJdk() ? "" : "_nojdk");
return String.format(
"%s_%s_%s_%s%s%s",
prefix,
distribution.getVersion(),
distribution.getType(),
distribution.getPlatform() == null ? "" : distribution.getPlatform() + "_",
distribution.getFlavor(),
distribution.getBundledJdk() ? "" : "_nojdk"
);
}
private static String capitalize(String s) {

ElasticsearchDistribution.java

@@ -120,8 +120,12 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
private final Property<Flavor> flavor;
private final Property<Boolean> bundledJdk;
ElasticsearchDistribution(String name, ObjectFactory objectFactory, Configuration fileConfiguration,
Configuration extractedConfiguration) {
ElasticsearchDistribution(
String name,
ObjectFactory objectFactory,
Configuration fileConfiguration,
Configuration extractedConfiguration
) {
this.name = name;
this.configuration = fileConfiguration;
this.version = objectFactory.property(String.class).convention(VersionProperties.getElasticsearch());
@@ -188,8 +192,9 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
case DEB:
case DOCKER:
case RPM:
throw new UnsupportedOperationException("distribution type [" + getType() + "] for " +
"elasticsearch distribution [" + name + "] cannot be extracted");
throw new UnsupportedOperationException(
"distribution type [" + getType() + "] for " + "elasticsearch distribution [" + name + "] cannot be extracted"
);
default:
return extracted;
@@ -217,15 +222,18 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
if (getType() == Type.INTEG_TEST_ZIP) {
if (platform.getOrNull() != null) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
"platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
if (flavor.getOrNull() != null) {
throw new IllegalArgumentException(
"flavor [" + flavor.get() + "] not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
"flavor [" + flavor.get() + "] not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
if (bundledJdk.getOrNull() != null) {
throw new IllegalArgumentException(
"bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
"bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
return;
}
@@ -237,8 +245,9 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
}
} else { // rpm, deb or docker
if (platform.isPresent()) {
throw new IllegalArgumentException("platform not allowed for elasticsearch distribution ["
+ name + "] of type [" + getType() + "]");
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
}
}

ExportElasticsearchBuildResourcesTask.java

@@ -82,8 +82,8 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
public File copy(String resource) {
if (getState().getExecuted() || getState().getExecuting()) {
throw new GradleException("buildResources can't be configured after the task ran. " +
"Make sure task is not used after configuration time"
throw new GradleException(
"buildResources can't be configured after the task ran. " + "Make sure task is not used after configuration time"
);
}
resources.add(resource);
@@ -96,19 +96,18 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
setDidWork(false);
throw new StopExecutionException();
}
resources.stream().parallel()
.forEach(resourcePath -> {
Path destination = outputDir.get().file(resourcePath).getAsFile().toPath();
try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
Files.createDirectories(destination.getParent());
if (is == null) {
throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found");
}
Files.copy(is, destination, StandardCopyOption.REPLACE_EXISTING);
} catch (IOException e) {
throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e);
resources.stream().parallel().forEach(resourcePath -> {
Path destination = outputDir.get().file(resourcePath).getAsFile().toPath();
try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
Files.createDirectories(destination.getParent());
if (is == null) {
throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found");
}
});
Files.copy(is, destination, StandardCopyOption.REPLACE_EXISTING);
} catch (IOException e) {
throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e);
}
});
}
}

FileSupplier.java

@@ -3,5 +3,4 @@ package org.elasticsearch.gradle;
import java.io.File;
import java.util.function.Supplier;
public interface FileSupplier extends Supplier<File> {
}
public interface FileSupplier extends Supplier<File> {}

Jdk.java

@@ -35,8 +35,7 @@ import java.util.regex.Pattern;
public class Jdk implements Buildable, Iterable<File> {
private static final List<String> ALLOWED_VENDORS = Collections.unmodifiableList(Arrays.asList("adoptopenjdk", "openjdk"));
static final Pattern VERSION_PATTERN =
Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?");
static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?");
private static final List<String> ALLOWED_PLATFORMS = Collections.unmodifiableList(Arrays.asList("darwin", "linux", "windows", "mac"));
private final String name;
@@ -87,7 +86,8 @@ public class Jdk implements Buildable, Iterable<File> {
public void setPlatform(String platform) {
if (ALLOWED_PLATFORMS.contains(platform) == false) {
throw new IllegalArgumentException(
"unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS);
"unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS
);
}
this.platform.set(platform);
}
@@ -135,7 +135,7 @@ public class Jdk implements Buildable, Iterable<File> {
}
version.finalizeValue();
platform.finalizeValue();
vendor.finalizeValue();;
vendor.finalizeValue();
}
@Override

JdkDownloadPlugin.java

@@ -56,9 +56,7 @@ public class JdkDownloadPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
NamedDomainObjectContainer<Jdk> jdksContainer = project.container(Jdk.class, name ->
new Jdk(name, project)
);
NamedDomainObjectContainer<Jdk> jdksContainer = project.container(Jdk.class, name -> new Jdk(name, project));
project.getExtensions().add(CONTAINER_NAME, jdksContainer);
project.afterEvaluate(p -> {
@@ -134,7 +132,8 @@ public class JdkDownloadPlugin implements Plugin<Project> {
Locale.ROOT,
"adoptopenjdk/OpenJDK%sU-jdk_x64_[module]_hotspot_[revision]_%s.[ext]",
jdkMajor,
jdkBuild);
jdkBuild
);
ivyRepo.patternLayout(layout -> layout.artifact(pattern));
ivyRepo.content(content -> content.includeGroup("adoptopenjdk"));
});
@@ -146,8 +145,11 @@ public class JdkDownloadPlugin implements Plugin<Project> {
ivyRepo.setName(repoName);
ivyRepo.setUrl("https://download.oracle.com");
ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
ivyRepo.patternLayout(layout -> layout.artifact(
"java/GA/jdk" + jdkVersion + "/" + hash + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
ivyRepo.patternLayout(
layout -> layout.artifact(
"java/GA/jdk" + jdkVersion + "/" + hash + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"
)
);
ivyRepo.content(content -> content.includeGroup("openjdk"));
});
} else {
@@ -156,8 +158,11 @@ public class JdkDownloadPlugin implements Plugin<Project> {
ivyRepo.setName(repoName);
ivyRepo.setUrl("https://download.oracle.com");
ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
ivyRepo.patternLayout(layout ->
layout.artifact("java/GA/jdk" + jdkMajor + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
ivyRepo.patternLayout(
layout -> layout.artifact(
"java/GA/jdk" + jdkMajor + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"
)
);
ivyRepo.content(content -> content.includeGroup("openjdk"));
});
}
@@ -173,15 +178,17 @@ public class JdkDownloadPlugin implements Plugin<Project> {
jdkConfig = configurations.create(remoteConfigName);
configurations.create(localConfigName);
}
String platformDep = platform.equals("darwin") || platform.equals("osx") ?
(vendor.equals("adoptopenjdk") ? "mac" : "osx") : platform;
String platformDep = platform.equals("darwin") || platform.equals("osx")
? (vendor.equals("adoptopenjdk") ? "mac" : "osx")
: platform;
String extension = platform.equals("windows") ? "zip" : "tar.gz";
String jdkDep = vendor + ":" + platformDep + ":" + jdkVersion + "@" + extension;
rootProject.getDependencies().add(configName(vendor, version, platform), jdkDep);
// add task for extraction
final Provider<Directory> extractPath =
rootProject.getLayout().getBuildDirectory().dir("jdks/" + vendor + "-" + jdkVersion + "_" + platform);
final Provider<Directory> extractPath = rootProject.getLayout()
.getBuildDirectory()
.dir("jdks/" + vendor + "-" + jdkVersion + "_" + platform);
// delay resolving jdkConfig until runtime
Supplier<File> jdkArchiveGetter = jdkConfig::getSingleFile;
@@ -208,7 +215,9 @@ public class JdkDownloadPlugin implements Plugin<Project> {
String[] pathSegments = details.getRelativePath().getSegments();
int index = 0;
for (; index < pathSegments.length; index++) {
if (pathSegments[index].matches("jdk-.*")) break;
if (pathSegments[index].matches("jdk-.*")) {
break;
}
}
assert index + 1 <= pathSegments.length;
String[] newPathSegments = Arrays.copyOfRange(pathSegments, index + 1, pathSegments.length);
@@ -235,39 +244,38 @@ public class JdkDownloadPlugin implements Plugin<Project> {
extractTask = rootProject.getTasks().register(extractTaskName, SymbolicLinkPreservingUntarTask.class, task -> {
task.getTarFile().set(jdkConfiguration.getSingleFile());
task.getExtractPath().set(extractPath);
task.setTransform(
name -> {
/*
* We want to remove up to and including the jdk-.* relative paths. That is, a JDK archive is structured as:
* jdk-12.0.1/
* jdk-12.0.1/Contents
* ...
*
* and we want to remove the leading jdk-12.0.1. Note however that there could also be a leading ./ as in
* ./
* ./jdk-12.0.1/
* ./jdk-12.0.1/Contents
*
* so we account for this and search the path components until we find the jdk-12.0.1, and strip the leading
* components.
*/
final Path entryName = Paths.get(name);
int index = 0;
for (; index < entryName.getNameCount(); index++) {
if (entryName.getName(index).toString().matches("jdk-.*")) break;
task.setTransform(name -> {
/*
* We want to remove up to and including the jdk-.* relative paths. That is, a JDK archive is structured as:
* jdk-12.0.1/
* jdk-12.0.1/Contents
* ...
*
* and we want to remove the leading jdk-12.0.1. Note however that there could also be a leading ./ as in
* ./
* ./jdk-12.0.1/
* ./jdk-12.0.1/Contents
*
* so we account for this and search the path components until we find the jdk-12.0.1, and strip the leading
* components.
*/
final Path entryName = Paths.get(name);
int index = 0;
for (; index < entryName.getNameCount(); index++) {
if (entryName.getName(index).toString().matches("jdk-.*")) {
break;
}
if (index + 1 >= entryName.getNameCount()) {
// this happens on the top-level directories in the archive, which we are removing
return null;
}
// finally remove the top-level directories from the output path
return entryName.subpath(index + 1, entryName.getNameCount());
});
}
if (index + 1 >= entryName.getNameCount()) {
// this happens on the top-level directories in the archive, which we are removing
return null;
}
// finally remove the top-level directories from the output path
return entryName.subpath(index + 1, entryName.getNameCount());
});
});
}
rootProject.getArtifacts().add(localConfigName,
extractPath,
artifact -> artifact.builtBy(extractTask));
rootProject.getArtifacts().add(localConfigName, extractPath, artifact -> artifact.builtBy(extractTask));
}
private static String configName(String vendor, String version, String platform) {
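
The long comment in the transform above describes a small path-stripping algorithm: drop everything up to and including the leading jdk-* component of an archive entry. Here it is extracted into a standalone sketch (hypothetical class, same logic) that can be run directly:

import java.nio.file.Path;
import java.nio.file.Paths;

class StripJdkPrefix {
    static Path strip(String entryName) {
        Path path = Paths.get(entryName);
        int index = 0;
        for (; index < path.getNameCount(); index++) {
            if (path.getName(index).toString().matches("jdk-.*")) {
                break;
            }
        }
        if (index + 1 >= path.getNameCount()) {
            return null; // top-level directory entries are removed entirely
        }
        // strip the leading components, including any "./" prefix
        return path.subpath(index + 1, path.getNameCount());
    }

    public static void main(String[] args) {
        System.out.println(strip("./jdk-12.0.1/Contents/Home/bin/java")); // Contents/Home/bin/java
        System.out.println(strip("jdk-12.0.1/"));                         // null
    }
}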

LazyPropertyMap.java

@@ -110,14 +110,17 @@ public class LazyPropertyMap<K, V> extends AbstractLazyPropertyCollection implem
@Override
public Set<Entry<K, V>> entrySet() {
return delegate.entrySet().stream()
return delegate.entrySet()
.stream()
.peek(this::validate)
.collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getValue())).entrySet();
.collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getValue()))
.entrySet();
}
@Override
public List<? extends Object> getNormalizedCollection() {
return delegate.values().stream()
return delegate.values()
.stream()
.peek(this::validate)
.filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)
.map(entry -> normalizationMapper == null ? entry : normalizationMapper.apply(entry.getKey(), entry.getValue()))

LoggingOutputStream.java

@@ -44,7 +44,9 @@ public abstract class LoggingOutputStream extends OutputStream {
@Override
public void write(final int b) throws IOException {
if (b == 0) return;
if (b == 0) {
return;
}
if (b == '\n') {
// always flush with newlines instead of adding to the buffer
flush();
@@ -69,7 +71,9 @@ public abstract class LoggingOutputStream extends OutputStream {
@Override
public void flush() {
if (end == start) return;
if (end == start) {
return;
}
logLine(new String(buffer, start, end - start));
start = end;
}
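
For readers skimming past the diff noise, the contract of this stream is: drop NUL bytes, buffer everything else, and emit one log line per '\n' or explicit flush. A standalone miniature under those assumptions (not the real class, which reuses an internal byte buffer):

import java.io.IOException;
import java.io.OutputStream;

class MiniLineLogger extends OutputStream {
    private final StringBuilder buffer = new StringBuilder();

    @Override
    public void write(int b) throws IOException {
        if (b == 0) {
            return; // ignore NUL, as the real stream does
        }
        if (b == '\n') {
            flush(); // always flush on newlines instead of buffering them
            return;
        }
        buffer.append((char) b);
    }

    @Override
    public void flush() {
        if (buffer.length() == 0) {
            return;
        }
        System.out.println("[log] " + buffer);
        buffer.setLength(0);
    }

    public static void main(String[] args) throws IOException {
        MiniLineLogger out = new MiniLineLogger();
        out.write("hello\nworld".getBytes()); // prints "[log] hello"
        out.flush();                          // prints "[log] world"
    }
}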

ReaperPlugin.java

@@ -38,10 +38,13 @@ public class ReaperPlugin implements Plugin<Project> {
project.getPlugins().apply(GlobalBuildInfoPlugin.class);
Path inputDir = project.getRootDir().toPath().resolve(".gradle")
.resolve("reaper").resolve("build-" + ProcessHandle.current().pid());
ReaperService service = project.getExtensions().create("reaper", ReaperService.class,
project, project.getBuildDir().toPath(), inputDir);
Path inputDir = project.getRootDir()
.toPath()
.resolve(".gradle")
.resolve("reaper")
.resolve("build-" + ProcessHandle.current().pid());
ReaperService service = project.getExtensions()
.create("reaper", ReaperService.class, project, project.getBuildDir().toPath(), inputDir);
project.getGradle().buildFinished(result -> service.shutdown());
}

ReaperService.java

@@ -61,8 +61,8 @@ public class ReaperService {
*/
public void registerPid(String serviceId, long pid) {
String[] killPidCommand = OS.<String[]>conditional()
.onWindows(() -> new String[]{"Taskill", "/F", "/PID", String.valueOf(pid)})
.onUnix(() -> new String[]{"kill", "-9", String.valueOf(pid)})
.onWindows(() -> new String[] { "Taskill", "/F", "/PID", String.valueOf(pid) })
.onUnix(() -> new String[] { "kill", "-9", String.valueOf(pid) })
.supply();
registerCommand(serviceId, killPidCommand);
}
@@ -81,9 +81,7 @@ public class ReaperService {
}
private Path getCmdFile(String serviceId) {
return inputDir.resolve(
serviceId.replaceAll("[^a-zA-Z0-9]","-") + ".cmd"
);
return inputDir.resolve(serviceId.replaceAll("[^a-zA-Z0-9]", "-") + ".cmd");
}
public void unregister(String serviceId) {
@@ -101,8 +99,7 @@ public class ReaperService {
reaperProcess.getOutputStream().close();
logger.info("Waiting for reaper to exit normally");
if (reaperProcess.waitFor() != 0) {
throw new GradleException("Reaper process failed. Check log at "
+ inputDir.resolve("error.log") + " for details");
throw new GradleException("Reaper process failed. Check log at " + inputDir.resolve("error.log") + " for details");
}
} catch (Exception e) {
throw new RuntimeException(e);
@@ -122,9 +119,12 @@ public class ReaperService {
// start the reaper
ProcessBuilder builder = new ProcessBuilder(
Jvm.current().getJavaExecutable().toString(), // same jvm as gradle
"-Xms4m", "-Xmx16m", // no need for a big heap, just need to read some files and execute
"-jar", jarPath.toString(),
inputDir.toString());
"-Xms4m",
"-Xmx16m", // no need for a big heap, just need to read some files and execute
"-jar",
jarPath.toString(),
inputDir.toString()
);
logger.info("Launching reaper: " + String.join(" ", builder.command()));
// be explicit for stdin, we use closing of the pipe to signal shutdown to the reaper
builder.redirectInput(ProcessBuilder.Redirect.PIPE);
@@ -148,12 +148,7 @@ public class ReaperService {
if (matcher.matches()) {
String path = matcher.group(1);
return Paths.get(
OS.<String>conditional()
.onWindows(() -> path.substring(1))
.onUnix(() -> path)
.supply()
);
return Paths.get(OS.<String>conditional().onWindows(() -> path.substring(1)).onUnix(() -> path).supply());
} else {
throw new RuntimeException("Unable to locate " + REAPER_CLASS + " on build classpath.");
}
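
Taken together with Reaper.java above, the protocol is: the build writes one sanitized <serviceId>.cmd file per registered command into the reaper's input directory, and closing the reaper's stdin (System.in.read() returning in Reaper.main) triggers reap(). A sketch of the registering side; the one-command-per-file format is an assumption, since registerCommand itself is outside this diff:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

class RegisterWithReaper {
    public static void main(String[] args) throws Exception {
        // same directory layout as ReaperPlugin: .gradle/reaper/build-<pid>
        Path inputDir = Paths.get(".gradle", "reaper", "build-" + ProcessHandle.current().pid());
        Files.createDirectories(inputDir);
        String serviceId = "node{::1}"; // sanitized below, as getCmdFile does
        Path cmdFile = inputDir.resolve(serviceId.replaceAll("[^a-zA-Z0-9]", "-") + ".cmd");
        Files.write(cmdFile, List.of("kill -9 12345")); // assumed: one command per file
        System.out.println("registered " + cmdFile);
    }
}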

WaitForHttpResource.java

@@ -112,7 +112,7 @@ public class WaitForHttpResource {
ssl = null;
}
IOException failure = null;
for (; ; ) {
while (true) {
try {
checkResource(ssl);
return true;
@@ -161,11 +161,12 @@ public class WaitForHttpResource {
private void configureBasicAuth(HttpURLConnection connection) {
if (username != null) {
if (password == null) {
throw new IllegalStateException("Basic Auth user [" + username
+ "] has been set, but no password has been configured");
throw new IllegalStateException("Basic Auth user [" + username + "] has been set, but no password has been configured");
}
connection.setRequestProperty("Authorization",
"Basic " + Base64.getEncoder().encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8)));
connection.setRequestProperty(
"Authorization",
"Basic " + Base64.getEncoder().encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8))
);
}
}
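
For reference, the Authorization header assembled above, as a tiny self-contained demo (hypothetical credentials):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

class BasicAuthHeader {
    public static void main(String[] args) {
        String username = "elastic", password = "password"; // hypothetical credentials
        // Basic auth is just "user:pass" base64-encoded with a "Basic " prefix
        String header = "Basic " + Base64.getEncoder()
            .encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8));
        System.out.println(header); // Basic ZWxhc3RpYzpwYXNzd29yZA==
    }
}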

GenerateGlobalBuildInfoTask.java

@@ -28,6 +28,7 @@ import java.io.Writer;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import static java.nio.charset.StandardCharsets.UTF_8;
@@ -122,8 +123,14 @@ public class GenerateGlobalBuildInfoTask extends DefaultTask {
public void generate() {
String javaVendorVersion = System.getProperty("java.vendor.version", System.getProperty("java.vendor"));
String gradleJavaVersion = System.getProperty("java.version");
String gradleJavaVersionDetails = javaVendorVersion + " " + gradleJavaVersion + " [" + System.getProperty("java.vm.name")
+ " " + System.getProperty("java.vm.version") + "]";
String gradleJavaVersionDetails = javaVendorVersion
+ " "
+ gradleJavaVersion
+ " ["
+ System.getProperty("java.vm.name")
+ " "
+ System.getProperty("java.vm.version")
+ "]";
String compilerJavaVersionDetails = gradleJavaVersionDetails;
JavaVersion compilerJavaVersionEnum = JavaVersion.current();
@@ -159,21 +166,24 @@ public class GenerateGlobalBuildInfoTask extends DefaultTask {
}
try (BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile.getAsFile().get()))) {
final String osName = System.getProperty("os.name");
final String osVersion = System.getProperty("os.version");
final String osArch = System.getProperty("os.arch");
final JavaVersion parsedVersion = JavaVersion.toVersion(gradleJavaVersion);
writer.write(" Gradle Version : " + getProject().getGradle().getGradleVersion() + "\n");
writer.write(" OS Info : " + System.getProperty("os.name") + " " + System.getProperty("os.version")
+ " (" + System.getProperty("os.arch") + ")\n");
writer.write(" OS Info : " + osName + " " + osVersion + " (" + osArch + ")\n");
if (gradleJavaVersionDetails.equals(compilerJavaVersionDetails) == false
|| gradleJavaVersionDetails.equals(runtimeJavaVersionDetails) == false) {
writer.write(" Compiler JDK Version : " + compilerJavaVersionEnum + " (" + compilerJavaVersionDetails + ")\n");
writer.write(" Compiler java.home : " + compilerJavaHome + "\n");
writer.write(" Runtime JDK Version : " + runtimeJavaVersionEnum + " (" + runtimeJavaVersionDetails + ")\n");
writer.write(" Runtime java.home : " + runtimeJavaHome + "\n");
writer.write(" Gradle JDK Version : " + JavaVersion.toVersion(gradleJavaVersion)
+ " (" + gradleJavaVersionDetails + ")\n");
writer.write(" Gradle JDK Version : " + parsedVersion + " (" + gradleJavaVersionDetails + ")\n");
writer.write(" Gradle java.home : " + gradleJavaHome);
} else {
writer.write(" JDK Version : " + JavaVersion.toVersion(gradleJavaVersion)
+ " (" + gradleJavaVersionDetails + ")\n");
writer.write(" JDK Version : " + parsedVersion + " (" + gradleJavaVersionDetails + ")\n");
writer.write(" JAVA_HOME : " + gradleJavaHome);
}
} catch (IOException e) {
@@ -182,14 +192,24 @@ public class GenerateGlobalBuildInfoTask extends DefaultTask {
// enforce Java version
if (compilerJavaVersionEnum.compareTo(minimumCompilerVersion) < 0) {
String message = "The compiler java.home must be set to a JDK installation directory for Java " + minimumCompilerVersion +
" but is [" + compilerJavaHome + "] corresponding to [" + compilerJavaVersionEnum + "]";
String message = String.format(
Locale.ROOT,
"The compiler java.home must be set to a JDK installation directory for Java %s but is [%s] " + "corresponding to [%s]",
minimumCompilerVersion,
compilerJavaHome,
compilerJavaVersionEnum
);
throw new GradleException(message);
}
if (runtimeJavaVersionEnum.compareTo(minimumRuntimeVersion) < 0) {
String message = "The runtime java.home must be set to a JDK installation directory for Java " + minimumRuntimeVersion +
" but is [" + runtimeJavaHome + "] corresponding to [" + runtimeJavaVersionEnum + "]";
String message = String.format(
Locale.ROOT,
"The runtime java.home must be set to a JDK installation directory for Java %s but is [%s] " + "corresponding to [%s]",
minimumRuntimeVersion,
runtimeJavaHome,
runtimeJavaVersionEnum
);
throw new GradleException(message);
}
@@ -207,8 +227,15 @@ public class GenerateGlobalBuildInfoTask extends DefaultTask {
expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version));
}
if (javaVersionEnum != expectedJavaVersionEnum) {
String message = "The environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java " +
expectedJavaVersionEnum + " but is [" + javaHome + "] corresponding to [" + javaVersionEnum + "]";
String message = String.format(
Locale.ROOT,
"The environment variable JAVA%d_HOME must be set to a JDK installation directory for Java"
+ " %s but is [%s] corresponding to [%s]",
version,
expectedJavaVersionEnum,
javaHome,
javaVersionEnum
);
throw new GradleException(message);
}
}
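
These messages move from + concatenation to String.format with an explicit Locale.ROOT. A small demo of why the locale argument matters for %d (the Thai-numerals locale tag is just an illustration):

import java.util.Locale;

class LocaleRootDemo {
    public static void main(String[] args) {
        // Locale.ROOT pins the output, so error text is identical on every machine
        System.out.println(String.format(Locale.ROOT, "JAVA%d_HOME", 11)); // always "JAVA11_HOME"
        // with a locale using non-ASCII digits, %d can render e.g. Thai numerals
        System.out.println(String.format(Locale.forLanguageTag("th-TH-u-nu-thai"), "JAVA%d_HOME", 11));
    }
}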
@@ -230,11 +257,11 @@ public class GenerateGlobalBuildInfoTask extends DefaultTask {
* Finds printable java version of the given JAVA_HOME
*/
private String findJavaVersionDetails(File javaHome) {
String versionInfoScript = "print(" +
"java.lang.System.getProperty(\"java.vendor.version\", java.lang.System.getProperty(\"java.vendor\")) + \" \" + " +
"java.lang.System.getProperty(\"java.version\") + \" [\" + " +
"java.lang.System.getProperty(\"java.vm.name\") + \" \" + " +
"java.lang.System.getProperty(\"java.vm.version\") + \"]\");";
String versionInfoScript = "print("
+ "java.lang.System.getProperty(\"java.vendor.version\", java.lang.System.getProperty(\"java.vendor\")) + \" \" + "
+ "java.lang.System.getProperty(\"java.version\") + \" [\" + "
+ "java.lang.System.getProperty(\"java.vm.name\") + \" \" + "
+ "java.lang.System.getProperty(\"java.vm.version\") + \"]\");";
return runJavaAsScript(javaHome, versionInfoScript).trim();
}

GlobalBuildInfoPlugin.java

@@ -66,8 +66,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
}
}
GenerateGlobalBuildInfoTask generateTask = project.getTasks().create("generateGlobalBuildInfo",
GenerateGlobalBuildInfoTask.class, task -> {
GenerateGlobalBuildInfoTask generateTask = project.getTasks()
.create("generateGlobalBuildInfo", GenerateGlobalBuildInfoTask.class, task -> {
task.setJavaVersions(javaVersions);
task.setMinimumCompilerVersion(minimumCompilerVersion);
task.setMinimumRuntimeVersion(minimumRuntimeVersion);
@@ -105,14 +105,16 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
params.setDefaultParallel(findDefaultParallel(project));
});
project.allprojects(p -> {
// Make sure that any task execution generates and prints build info
p.getTasks().configureEach(task -> {
if (task != generateTask && task != printTask) {
task.dependsOn(printTask);
}
});
});
project.allprojects(
p -> {
// Make sure that any task execution generates and prints build info
p.getTasks().configureEach(task -> {
if (task != generateTask && task != printTask) {
task.dependsOn(printTask);
}
});
}
);
}
private static File findCompilerJavaHome() {
@@ -140,11 +142,16 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
private static String findJavaHome(String version) {
String versionedJavaHome = System.getenv(getJavaHomeEnvVarName(version));
if (versionedJavaHome == null) {
throw new GradleException(
"$" + getJavaHomeEnvVarName(version) + " must be set to build Elasticsearch. " +
"Note that if the variable was just set you might have to run `./gradlew --stop` for " +
"it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
final String exceptionMessage = String.format(
Locale.ROOT,
"$%s must be set to build Elasticsearch. "
+ "Note that if the variable was just set you "
+ "might have to run `./gradlew --stop` for "
+ "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details.",
getJavaHomeEnvVarName(version)
);
throw new GradleException(exceptionMessage);
}
return versionedJavaHome;
}
@@ -154,9 +161,9 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
}
private static String getResourceContents(String resourcePath) {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(GlobalBuildInfoPlugin.class.getResourceAsStream(resourcePath))
)) {
try (
BufferedReader reader = new BufferedReader(new InputStreamReader(GlobalBuildInfoPlugin.class.getResourceAsStream(resourcePath)))
) {
StringBuilder b = new StringBuilder();
for (String line = reader.readLine(); line != null; line = reader.readLine()) {
if (b.length() != 0) {
@@ -191,7 +198,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
if (name.equals("physical id")) {
currentID = value;
}
// Number of cores not including hyper-threading
// Number of cores not including hyper-threading
if (name.equals("cpu cores")) {
assert currentID.isEmpty() == false;
socketToCore.put("currentID", Integer.valueOf(value));
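
The /proc/cpuinfo walk above pairs each socket's "physical id" with its "cpu cores" count; summing one entry per socket yields physical cores without hyper-threading. A simplified standalone sketch of that scheme (note it keys the map by the parsed id variable):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

class PhysicalCores {
    public static void main(String[] args) throws IOException {
        Map<String, Integer> socketToCore = new HashMap<>();
        String currentId = "";
        for (String line : Files.readAllLines(Paths.get("/proc/cpuinfo"))) {
            String[] kv = line.split(":", 2);
            if (kv.length != 2) {
                continue;
            }
            String name = kv[0].trim(), value = kv[1].trim();
            if (name.equals("physical id")) {
                currentId = value; // remember which socket we are in
            }
            if (name.equals("cpu cores")) {
                socketToCore.put(currentId, Integer.valueOf(value)); // one entry per socket
            }
        }
        System.out.println(socketToCore.values().stream().mapToInt(Integer::intValue).sum());
    }
}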
@@ -295,9 +302,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
private static String readFirstLine(final Path path) throws IOException {
String firstLine;
try (Stream<String> lines = Files.lines(path, StandardCharsets.UTF_8)) {
firstLine = lines
.findFirst()
.orElseThrow(() -> new IOException("file [" + path + "] is empty"));
firstLine = lines.findFirst().orElseThrow(() -> new IOException("file [" + path + "] is empty"));
}
return firstLine;
}

DependencyLicensesTask.java

@@ -44,6 +44,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
@@ -209,7 +210,8 @@ public class DependencyLicensesTask extends DefaultTask {
}
private void checkDependencies(Map<String, Boolean> licenses, Map<String, Boolean> notices, Set<File> shaFiles)
throws NoSuchAlgorithmException, IOException {
throws NoSuchAlgorithmException,
IOException {
for (File dependency : dependencies) {
String jarName = dependency.getName();
String depName = regex.matcher(jarName).replaceFirst("");
@@ -223,8 +225,8 @@ public class DependencyLicensesTask extends DefaultTask {
}
}
private void validateSha(Set<File> shaFiles, File dependency, String jarName, String depName)
throws NoSuchAlgorithmException, IOException {
private void validateSha(Set<File> shaFiles, File dependency, String jarName, String depName) throws NoSuchAlgorithmException,
IOException {
if (ignoreShas.contains(depName)) {
// local deps should not have sha files!
if (getShaFile(jarName).exists()) {
@@ -269,10 +271,18 @@ public class DependencyLicensesTask extends DefaultTask {
String sha = getSha1(jar);
if (expectedSha.equals(sha) == false) {
throw new GradleException(
"SHA has changed! Expected " + expectedSha + " for " + jarName + " but got " + sha + ". " +
"\nThis usually indicates a corrupt dependency cache or artifacts changed upstream." +
"\nEither wipe your cache, fix the upstream artifact, or delete " + shaFile + " and run updateShas");
final String exceptionMessage = String.format(
Locale.ROOT,
"SHA has changed! Expected %s for %s but got %s."
+ "\nThis usually indicates a corrupt dependency cache or artifacts changed upstream."
+ "\nEither wipe your cache, fix the upstream artifact, or delete %s and run updateShas",
expectedSha,
jarName,
sha,
shaFile
);
throw new GradleException(exceptionMessage);
}
shaFiles.remove(shaFile);
}
@@ -314,13 +324,11 @@ public class DependencyLicensesTask extends DefaultTask {
throw new GradleException("\"" + licensesDir.getPath() + "\" isn't a valid directory");
}
return Arrays.stream(array)
.filter(file -> file.getName().endsWith(SHA_EXTENSION))
.collect(Collectors.toSet());
return Arrays.stream(array).filter(file -> file.getName().endsWith(SHA_EXTENSION)).collect(Collectors.toSet());
}
String getSha1(File file) throws IOException, NoSuchAlgorithmException {
byte[] bytes = Files.readAllBytes(file.toPath());
byte[] bytes = Files.readAllBytes(file.toPath());
MessageDigest digest = MessageDigest.getInstance("SHA-1");
char[] encoded = Hex.encodeHex(digest.digest(bytes));
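
getSha1 relies on commons-codec's Hex.encodeHex; an equivalent dependency-free sketch (String.format stands in for the hex encoder), handy for checking a .sha1 file by hand:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

class Sha1Demo {
    public static void main(String[] args) throws Exception {
        MessageDigest digest = MessageDigest.getInstance("SHA-1");
        byte[] hash = digest.digest("hello".getBytes(StandardCharsets.UTF_8));
        StringBuilder sb = new StringBuilder();
        for (byte b : hash) {
            sb.append(String.format("%02x", b)); // lower-case hex, two digits per byte
        }
        System.out.println(sb); // aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d
    }
}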

FilePermissionsTask.java

@@ -50,10 +50,10 @@ public class FilePermissionsTask extends DefaultTask {
* A pattern set of which files should be checked.
*/
private final PatternFilterable filesFilter = new PatternSet()
// we always include all source files, and exclude what should not be checked
.include("**")
// exclude sh files that might have the executable bit set
.exclude("**/*.sh");
// we always include all source files, and exclude what should not be checked
.include("**")
// exclude sh files that might have the executable bit set
.exclude("**/*.sh");
private File outputMarker = new File(getProject().getBuildDir(), "markers/filePermissions");
@@ -64,11 +64,11 @@ public class FilePermissionsTask extends DefaultTask {
private static boolean isExecutableFile(File file) {
try {
Set<PosixFilePermission> permissions = Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class)
.readAttributes()
.permissions();
.readAttributes()
.permissions();
return permissions.contains(PosixFilePermission.OTHERS_EXECUTE)
|| permissions.contains(PosixFilePermission.OWNER_EXECUTE)
|| permissions.contains(PosixFilePermission.GROUP_EXECUTE);
|| permissions.contains(PosixFilePermission.OWNER_EXECUTE)
|| permissions.contains(PosixFilePermission.GROUP_EXECUTE);
} catch (IOException e) {
throw new IllegalStateException("unable to read the file " + file + " attributes", e);
}
@@ -80,10 +80,11 @@ public class FilePermissionsTask extends DefaultTask {
@InputFiles
@SkipWhenEmpty
public FileCollection getFiles() {
return Boilerplate.getJavaSourceSets(getProject()).stream()
.map(sourceSet -> sourceSet.getAllSource().matching(filesFilter))
.reduce(FileTree::plus)
.orElse(getProject().files().getAsFileTree());
return Boilerplate.getJavaSourceSets(getProject())
.stream()
.map(sourceSet -> sourceSet.getAllSource().matching(filesFilter))
.reduce(FileTree::plus)
.orElse(getProject().files().getAsFileTree());
}
@TaskAction
@@ -91,10 +92,11 @@ public class FilePermissionsTask extends DefaultTask {
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
throw new StopExecutionException();
}
List<String> failures = getFiles().getFiles().stream()
.filter(FilePermissionsTask::isExecutableFile)
.map(file -> "Source file is executable: " + file)
.collect(Collectors.toList());
List<String> failures = getFiles().getFiles()
.stream()
.filter(FilePermissionsTask::isExecutableFile)
.map(file -> "Source file is executable: " + file)
.collect(Collectors.toList());
if (!failures.isEmpty()) {
throw new GradleException("Found invalid file permissions:\n" + String.join("\n", failures));

ForbiddenPatternsTask.java

@@ -88,7 +88,9 @@ public class ForbiddenPatternsTask extends DefaultTask {
@InputFiles
@SkipWhenEmpty
public FileCollection getFiles() {
return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets()
return getProject().getConvention()
.getPlugin(JavaPluginConvention.class)
.getSourceSets()
.stream()
.map(sourceSet -> sourceSet.getAllSource().matching(filesFilter))
.reduce(FileTree::plus)
@@ -101,8 +103,8 @@ public class ForbiddenPatternsTask extends DefaultTask {
List<String> failures = new ArrayList<>();
for (File f : getFiles()) {
List<String> lines;
try(Stream<String> stream = Files.lines(f.toPath(), StandardCharsets.UTF_8)) {
lines = stream.collect(Collectors.toList());
try (Stream<String> stream = Files.lines(f.toPath(), StandardCharsets.UTF_8)) {
lines = stream.collect(Collectors.toList());
} catch (UncheckedIOException e) {
throw new IllegalArgumentException("Failed to read " + f + " as UTF_8", e);
}
@@ -112,13 +114,17 @@ public class ForbiddenPatternsTask extends DefaultTask {
.collect(Collectors.toList());
String path = getProject().getRootProject().getProjectDir().toURI().relativize(f.toURI()).toString();
failures.addAll(invalidLines.stream()
.map(l -> new AbstractMap.SimpleEntry<>(l+1, lines.get(l)))
.flatMap(kv -> patterns.entrySet().stream()
.filter(p -> Pattern.compile(p.getValue()).matcher(kv.getValue()).find())
.map(p -> "- " + p.getKey() + " on line " + kv.getKey() + " of " + path)
)
.collect(Collectors.toList()));
failures.addAll(
invalidLines.stream()
.map(l -> new AbstractMap.SimpleEntry<>(l + 1, lines.get(l)))
.flatMap(
kv -> patterns.entrySet()
.stream()
.filter(p -> Pattern.compile(p.getValue()).matcher(kv.getValue()).find())
.map(p -> "- " + p.getKey() + " on line " + kv.getKey() + " of " + path)
)
.collect(Collectors.toList())
);
}
if (failures.isEmpty() == false) {
throw new GradleException("Found invalid patterns:\n" + String.join("\n", failures));
@@ -143,7 +149,7 @@ public class ForbiddenPatternsTask extends DefaultTask {
filesFilter.exclude(excludes);
}
public void rule(Map<String,String> props) {
public void rule(Map<String, String> props) {
String name = props.remove("name");
if (name == null) {
throw new InvalidUserDataException("Missing [name] for invalid pattern rule");
@@ -153,8 +159,7 @@ public class ForbiddenPatternsTask extends DefaultTask {
throw new InvalidUserDataException("Missing [pattern] for invalid pattern rule");
}
if (props.isEmpty() == false) {
throw new InvalidUserDataException("Unknown arguments for ForbiddenPatterns rule mapping: "
+ props.keySet().toString());
throw new InvalidUserDataException("Unknown arguments for ForbiddenPatterns rule mapping: " + props.keySet().toString());
}
// TODO: fail if pattern contains a newline, it won't work (currently)
patterns.put(name, pattern);
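
The scan earlier in this file numbers each offending line and reports it as "- <rule> on line <n>". A standalone sketch of that matching loop (the nocommit rule here is a hypothetical example, not taken from this diff):

import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

class PatternScan {
    public static void main(String[] args) {
        Map<String, String> patterns = Map.of("nocommit", "nocommit|NOCOMMIT"); // hypothetical rule
        List<String> lines = List.of("ok line", "TODO nocommit: fix me");
        for (int i = 0; i < lines.size(); i++) {
            for (Map.Entry<String, String> p : patterns.entrySet()) {
                if (Pattern.compile(p.getValue()).matcher(lines.get(i)).find()) {
                    System.out.println("- " + p.getKey() + " on line " + (i + 1)); // 1-based, as the task reports
                }
            }
        }
    }
}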

LoggerUsageTask.java

@@ -67,10 +67,15 @@ public class LoggerUsageTask extends PrecommitTask {
@PathSensitive(PathSensitivity.RELATIVE)
@SkipWhenEmpty
public FileCollection getClassDirectories() {
return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets().stream()
return getProject().getConvention()
.getPlugin(JavaPluginConvention.class)
.getSourceSets()
.stream()
// Don't pick up all source sets like the java9 ones as logger-check doesn't support the class format
.filter(sourceSet -> sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME)
|| sourceSet.getName().equals(SourceSet.TEST_SOURCE_SET_NAME))
.filter(
sourceSet -> sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME)
|| sourceSet.getName().equals(SourceSet.TEST_SOURCE_SET_NAME)
)
.map(sourceSet -> sourceSet.getOutput().getClassesDirs())
.reduce(FileCollection::plus)
.orElse(getProject().files())

PrecommitTask.java

@@ -36,7 +36,7 @@ public class PrecommitTask extends DefaultTask {
@TaskAction
public void writeMarker() throws IOException {
Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE);
Files.write(getSuccessMarker().toPath(), new byte[] {}, StandardOpenOption.CREATE);
}
}

TestingConventionRule.java

@@ -67,6 +67,7 @@ public class TestingConventionRule implements Serializable {
public void taskName(Pattern expression) {
taskNames.add(expression);
}
public void taskName(String expression) {
taskNames.add(Pattern.compile(expression));
}
@@ -86,8 +87,12 @@ public class TestingConventionRule implements Serializable {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TestingConventionRule that = (TestingConventionRule) o;
return Objects.equals(suffix, that.suffix);
}

TestingConventionsTasks.java

@@ -71,19 +71,22 @@ public class TestingConventionsTasks extends DefaultTask {
@Input
public Map<String, Set<File>> getClassFilesPerEnabledTask() {
return getProject().getTasks().withType(Test.class).stream()
return getProject().getTasks()
.withType(Test.class)
.stream()
.filter(Task::getEnabled)
.collect(Collectors.toMap(
Task::getPath,
task -> task.getCandidateClassFiles().getFiles()
));
.collect(Collectors.toMap(Task::getPath, task -> task.getCandidateClassFiles().getFiles()));
}
@Input
public Map<String, File> getTestClassNames() {
if (testClassNames == null) {
testClassNames = Boilerplate.getJavaSourceSets(getProject()).getByName("test").getOutput().getClassesDirs()
.getFiles().stream()
testClassNames = Boilerplate.getJavaSourceSets(getProject())
.getByName("test")
.getOutput()
.getClassesDirs()
.getFiles()
.stream()
.filter(File::exists)
.flatMap(testRoot -> walkPathAndLoadClasses(testRoot).entrySet().stream())
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
@@ -113,8 +116,11 @@ public class TestingConventionsTasks extends DefaultTask {
return Collections.emptySet();
}
return javaSourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME)
.getOutput().getClassesDirs().getAsFileTree()
.getFiles().stream()
.getOutput()
.getClassesDirs()
.getAsFileTree()
.getFiles()
.stream()
.filter(file -> file.getName().endsWith(".class"))
.map(File::getName)
.map(name -> name.substring(0, name.length() - 6))
@@ -126,21 +132,22 @@ public class TestingConventionsTasks extends DefaultTask {
public void doCheck() throws IOException {
final String problems;
try (URLClassLoader isolatedClassLoader = new URLClassLoader(
getTestsClassPath().getFiles().stream().map(this::fileToUrl).toArray(URL[]::new)
)) {
try (
URLClassLoader isolatedClassLoader = new URLClassLoader(
getTestsClassPath().getFiles().stream().map(this::fileToUrl).toArray(URL[]::new)
)
) {
Predicate<Class<?>> isStaticClass = clazz -> Modifier.isStatic(clazz.getModifiers());
Predicate<Class<?>> isPublicClass = clazz -> Modifier.isPublic(clazz.getModifiers());
Predicate<Class<?>> isAbstractClass = clazz -> Modifier.isAbstract(clazz.getModifiers());
final Map<File, ? extends Class<?>> classes = getTestClassNames().entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getValue,
entry -> loadClassWithoutInitializing(entry.getKey(), isolatedClassLoader))
);
final Map<File, ? extends Class<?>> classes = getTestClassNames().entrySet()
.stream()
.collect(Collectors.toMap(Map.Entry::getValue, entry -> loadClassWithoutInitializing(entry.getKey(), isolatedClassLoader)));
final FileTree allTestClassFiles = getProject().files(
classes.values().stream()
classes.values()
.stream()
.filter(isStaticClass.negate())
.filter(isPublicClass)
.filter((Predicate<Class<?>>) this::implementsNamingConvention)
@@ -150,11 +157,13 @@ public class TestingConventionsTasks extends DefaultTask {
final Map<String, Set<File>> classFilesPerTask = getClassFilesPerEnabledTask();
final Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream()
final Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet()
.stream()
.collect(
Collectors.toMap(
Map.Entry::getKey,
entry -> entry.getValue().stream()
entry -> entry.getValue()
.stream()
.map(classes::get)
.filter(this::implementsNamingConvention)
.collect(Collectors.toSet())
@@ -172,23 +181,27 @@ public class TestingConventionsTasks extends DefaultTask {
.collect(
Collectors.toMap(
TestingConventionRule::getSuffix,
rule -> rule.getBaseClasses().stream()
rule -> rule.getBaseClasses()
.stream()
.map(each -> loadClassWithoutInitializing(each, isolatedClassLoader))
.collect(Collectors.toSet())
));
)
);
}
problems = collectProblems(
checkNoneExists(
"Test classes implemented by inner classes will not run",
classes.values().stream()
classes.values()
.stream()
.filter(isStaticClass)
.filter(isPublicClass)
.filter(((Predicate<Class<?>>) this::implementsNamingConvention).or(this::seemsLikeATest))
),
checkNoneExists(
"Seem like test classes but don't match naming convention",
classes.values().stream()
classes.values()
.stream()
.filter(isStaticClass.negate())
.filter(isPublicClass)
.filter(isAbstractClass.negate())
@@ -199,54 +212,42 @@ public class TestingConventionsTasks extends DefaultTask {
// TODO: check for abstract classes that implement the naming conventions
// No empty enabled tasks
collectProblems(
testClassesPerTask.entrySet().stream()
.map(entry ->
checkAtLeastOneExists(
"test class included in task " + entry.getKey(),
entry.getValue().stream()
)
)
testClassesPerTask.entrySet()
.stream()
.map(entry -> checkAtLeastOneExists("test class included in task " + entry.getKey(), entry.getValue().stream()))
.sorted()
.collect(Collectors.joining("\n"))
),
checkNoneExists(
"Test classes are not included in any enabled task (" +
classFilesPerTask.keySet().stream()
.collect(Collectors.joining(",")) + ")",
allTestClassFiles.getFiles().stream()
.filter(testFile ->
classFilesPerTask.values().stream()
.anyMatch(fileSet -> fileSet.contains(testFile)) == false
)
"Test classes are not included in any enabled task ("
+ classFilesPerTask.keySet().stream().collect(Collectors.joining(","))
+ ")",
allTestClassFiles.getFiles()
.stream()
.filter(testFile -> classFilesPerTask.values().stream().anyMatch(fileSet -> fileSet.contains(testFile)) == false)
.map(classes::get)
),
collectProblems(
suffixToBaseClass.entrySet().stream()
.filter(entry -> entry.getValue().isEmpty() == false)
.map(entry -> {
return checkNoneExists(
"Tests classes with suffix `" + entry.getKey() + "` should extend " +
entry.getValue().stream().map(Class::getName).collect(Collectors.joining(" or ")) +
" but the following classes do not",
classes.values().stream()
.filter(clazz -> clazz.getName().endsWith(entry.getKey()))
.filter(clazz -> entry.getValue().stream()
.anyMatch(test -> test.isAssignableFrom(clazz)) == false)
);
}).sorted()
.collect(Collectors.joining("\n"))
),
collectProblems(suffixToBaseClass.entrySet().stream().filter(entry -> entry.getValue().isEmpty() == false).map(entry -> {
return checkNoneExists(
"Tests classes with suffix `"
+ entry.getKey()
+ "` should extend "
+ entry.getValue().stream().map(Class::getName).collect(Collectors.joining(" or "))
+ " but the following classes do not",
classes.values()
.stream()
.filter(clazz -> clazz.getName().endsWith(entry.getKey()))
.filter(clazz -> entry.getValue().stream().anyMatch(test -> test.isAssignableFrom(clazz)) == false)
);
}).sorted().collect(Collectors.joining("\n"))),
// TODO: check that the testing tasks are included in the right task based on the name ( from the rule )
checkNoneExists(
"Classes matching the test naming convention should be in test not main",
getMainClassNamedLikeTests()
)
checkNoneExists("Classes matching the test naming convention should be in test not main", getMainClassNamedLikeTests())
);
}
if (problems.isEmpty()) {
getSuccessMarker().getParentFile().mkdirs();
Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE);
Files.write(getSuccessMarker().toPath(), new byte[] {}, StandardOpenOption.CREATE);
} else {
getLogger().error(problems);
throw new IllegalStateException("Testing conventions are not honored");
@ -254,17 +255,11 @@ public class TestingConventionsTasks extends DefaultTask {
}
private String collectProblems(String... problems) {
return Stream.of(problems)
.map(String::trim)
.filter(s -> s.isEmpty() == false)
.collect(Collectors.joining("\n"));
return Stream.of(problems).map(String::trim).filter(s -> s.isEmpty() == false).collect(Collectors.joining("\n"));
}
private String checkNoneExists(String message, Stream<? extends Class<?>> stream) {
String problem = stream
.map(each -> " * " + each.getName())
.sorted()
.collect(Collectors.joining("\n"));
String problem = stream.map(each -> " * " + each.getName()).sorted().collect(Collectors.joining("\n"));
if (problem.isEmpty() == false) {
return message + ":\n" + problem;
} else {
@ -273,10 +268,7 @@ public class TestingConventionsTasks extends DefaultTask {
}
private String checkNoneExists(String message, Set<? extends String> candidates) {
String problem = candidates.stream()
.map(each -> " * " + each)
.sorted()
.collect(Collectors.joining("\n"));
String problem = candidates.stream().map(each -> " * " + each).sorted().collect(Collectors.joining("\n"));
if (problem.isEmpty() == false) {
return message + ":\n" + problem;
} else {
@ -309,8 +301,12 @@ public class TestingConventionsTasks extends DefaultTask {
return true;
}
if (isAnnotated(method, junitAnnotation)) {
getLogger().debug("{} is a test because it has method '{}' annotated with '{}'",
clazz.getName(), method.getName(), junitAnnotation.getName());
getLogger().debug(
"{} is a test because it has method '{}' annotated with '{}'",
clazz.getName(),
method.getName(),
junitAnnotation.getName()
);
return true;
}
}
@ -318,9 +314,7 @@ public class TestingConventionsTasks extends DefaultTask {
return false;
} catch (NoClassDefFoundError e) {
// Include the message to get a more useful error when running Gradle without -s
throw new IllegalStateException(
"Failed to inspect class " + clazz.getName() + ". Missing class? " + e.getMessage(),
e);
throw new IllegalStateException("Failed to inspect class " + clazz.getName() + ". Missing class? " + e.getMessage(), e);
}
}
@ -329,9 +323,7 @@ public class TestingConventionsTasks extends DefaultTask {
}
private boolean implementsNamingConvention(String className) {
if (naming.stream()
.map(TestingConventionRule::getSuffix)
.anyMatch(suffix -> className.endsWith(suffix))) {
if (naming.stream().map(TestingConventionRule::getSuffix).anyMatch(suffix -> className.endsWith(suffix))) {
getLogger().debug("{} is a test because it matches the naming convention", className);
return true;
}
@ -339,9 +331,7 @@ public class TestingConventionsTasks extends DefaultTask {
}
private boolean matchesTestMethodNamingConvention(Method method) {
return method.getName().startsWith(TEST_METHOD_PREFIX) &&
Modifier.isStatic(method.getModifiers()) == false
;
return method.getName().startsWith(TEST_METHOD_PREFIX) && Modifier.isStatic(method.getModifiers()) == false;
}
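// Illustrative sketch of a class that satisfies both checks above: the name
// ends in a configured suffix such as "Tests" (suffixes come from the
// TestingConventionRule configuration, so the exact value is an assumption)
// and test methods are non-static and start with the "test" prefix:
//
//     public class ShardAllocationTests extends ESTestCase { // base class assumed
//         public void testRebalancing() { /* ... */ }
//     }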
private boolean isAnnotated(Method method, Class<?> annotation) {

View File

@ -69,9 +69,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
"WARNING: Class '(.*)' cannot be loaded \\(.*\\)\\. Please fix the classpath!"
);
private static final Pattern VIOLATION_PATTERN = Pattern.compile(
"\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)"
);
private static final Pattern VIOLATION_PATTERN = Pattern.compile("\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)");
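// For illustration, VIOLATION_PATTERN is meant to pull the offending class
// name out of forbidden-apis output lines shaped roughly like the sample
// below (the sample line is an assumption, not real output):
//
//     Matcher m = VIOLATION_PATTERN.matcher("  in org.example.BadClass (BadClass.java:42)");
//     if (m.matches()) {
//         String offendingClass = m.group(1); // "org.example.BadClass"
//     }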
private static final int SIG_KILL_EXIT_VALUE = 137;
private static final List<Integer> EXPECTED_EXIT_CODES = Arrays.asList(
CliMain.EXIT_SUCCESS,
@ -124,10 +122,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
@Internal
public File getJarExpandDir() {
return new File(
new File(getProject().getBuildDir(), "precommit/thirdPartyAudit"),
getName()
);
return new File(new File(getProject().getBuildDir(), "precommit/thirdPartyAudit"), getName());
}
@OutputFile
@ -154,7 +149,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
}
}
public void ignoreJarHellWithJDK(String ...classes) {
public void ignoreJarHellWithJDK(String... classes) {
for (String each : classes) {
jdkJarHellExcludes.add(each);
}
@ -174,16 +169,15 @@ public class ThirdPartyAuditTask extends DefaultTask {
@Classpath
@SkipWhenEmpty
public Set<File> getJarsToScan() {
// These are SelfResolvingDependency, and some of them are backed by file collections, like the Gradle API files,
// or dependencies added as `files(...)`, we can't be sure if those are third party or not.
// err on the side of scanning these to make sure we don't miss anything
Spec<Dependency> reallyThirdParty = dep -> dep.getGroup() != null &&
dep.getGroup().startsWith("org.elasticsearch") == false;
Set<File> jars = getRuntimeConfiguration()
Spec<Dependency> reallyThirdParty = dep -> dep.getGroup() != null && dep.getGroup().startsWith("org.elasticsearch") == false;
Set<File> jars = getRuntimeConfiguration().getResolvedConfiguration().getFiles(reallyThirdParty);
Set<File> compileOnlyConfiguration = getProject().getConfigurations()
.getByName("compileOnly")
.getResolvedConfiguration()
.getFiles(reallyThirdParty);
Set<File> compileOnlyConfiguration = getProject().getConfigurations().getByName("compileOnly").getResolvedConfiguration()
.getFiles(reallyThirdParty);
// don't scan provided dependencies that we already scanned, e.g. don't scan core's dependencies for every plugin
if (compileOnlyConfiguration != null) {
jars.removeAll(compileOnlyConfiguration);
@ -221,8 +215,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
if (bogousExcludesCount != 0 && bogousExcludesCount == missingClassExcludes.size() + violationsExcludes.size()) {
logForbiddenAPIsOutput(forbiddenApisOutput);
throw new IllegalStateException(
"All excluded classes seem to have no issues. " +
"This is sometimes an indication that the check silently failed"
"All excluded classes seem to have no issues. " + "This is sometimes an indication that the check silently failed"
);
}
assertNoPointlessExclusions("are not missing", missingClassExcludes, missingClasses);
@ -232,10 +225,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
assertNoPointlessExclusions("do not generate jar hell with the JDK", jdkJarHellExcludes, jdkJarHellClasses);
if (missingClassExcludes == null && (missingClasses.isEmpty() == false)) {
getLogger().info(
"Found missing classes, but task is configured to ignore all of them:\n {}",
formatClassList(missingClasses)
);
getLogger().info("Found missing classes, but task is configured to ignore all of them:\n {}", formatClassList(missingClasses));
missingClasses.clear();
}
@ -247,7 +237,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
if (missingClasses.isEmpty() == false) {
getLogger().error("Missing classes:\n{}", formatClassList(missingClasses));
}
if(violationsClasses.isEmpty() == false) {
if (violationsClasses.isEmpty() == false) {
getLogger().error("Classes with violations:\n{}", formatClassList(violationsClasses));
}
throw new IllegalStateException("Audit of third party dependencies failed");
@ -257,7 +247,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
// Mark successful third party audit check
getSuccessMarker().getParentFile().mkdirs();
Files.write(getSuccessMarker().toPath(), new byte[]{});
Files.write(getSuccessMarker().toPath(), new byte[] {});
}
private void logForbiddenAPIsOutput(String forbiddenApisOutput) {
@ -310,8 +300,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
jdkJarHellClasses.removeAll(jdkJarHellExcludes);
if (jdkJarHellClasses.isEmpty() == false) {
throw new IllegalStateException(
"Audit of third party dependencies failed:\n" +
" Jar Hell with the JDK:\n" + formatClassList(jdkJarHellClasses)
"Audit of third party dependencies failed:\n" + " Jar Hell with the JDK:\n" + formatClassList(jdkJarHellClasses)
);
}
}
@ -328,10 +317,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
}
private String formatClassList(Set<String> classList) {
return classList.stream()
.map(name -> " * " + name)
.sorted()
.collect(Collectors.joining("\n"));
return classList.stream().map(name -> " * " + name).sorted().collect(Collectors.joining("\n"));
}
private String runForbiddenAPIsCli() throws IOException {
@ -347,11 +333,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
);
spec.jvmArgs("-Xmx1g");
spec.setMain("de.thetaphi.forbiddenapis.cli.CliMain");
spec.args(
"-f", getSignatureFile().getAbsolutePath(),
"-d", getJarExpandDir(),
"--allowmissingclasses"
);
spec.args("-f", getSignatureFile().getAbsolutePath(), "-d", getJarExpandDir(), "--allowmissingclasses");
spec.setErrorOutput(errorOut);
if (getLogger().isInfoEnabled() == false) {
spec.setStandardOutput(new NullOutputStream());
@ -359,9 +341,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
spec.setIgnoreExitValue(true);
});
if (OS.current().equals(OS.LINUX) && result.getExitValue() == SIG_KILL_EXIT_VALUE) {
throw new IllegalStateException(
"Third party audit was killed buy SIGKILL, could be a victim of the Linux OOM killer"
);
throw new IllegalStateException("Third party audit was killed buy SIGKILL, could be a victim of the Linux OOM killer");
}
final String forbiddenApisOutput;
try (ByteArrayOutputStream outputStream = errorOut) {

View File

@ -79,7 +79,8 @@ public class SymbolicLinkPreservingTar extends Tar {
SymbolicLinkPreservingTarCopyAction(
final Provider<RegularFile> tarFile,
final ArchiveOutputStreamFactory compressor,
final boolean isPreserveFileTimestamps) {
final boolean isPreserveFileTimestamps
) {
this.tarFile = tarFile;
this.compressor = compressor;
this.isPreserveFileTimestamps = isPreserveFileTimestamps;
@ -87,8 +88,10 @@ public class SymbolicLinkPreservingTar extends Tar {
@Override
public WorkResult execute(final CopyActionProcessingStream stream) {
try (OutputStream out = compressor.createArchiveOutputStream(tarFile.get().getAsFile());
TarArchiveOutputStream tar = new TarArchiveOutputStream(out)) {
try (
OutputStream out = compressor.createArchiveOutputStream(tarFile.get().getAsFile());
TarArchiveOutputStream tar = new TarArchiveOutputStream(out)
) {
tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
stream.process(new SymbolicLinkPreservingTarStreamAction(tar));
} catch (final IOException e) {
@ -136,7 +139,8 @@ public class SymbolicLinkPreservingTar extends Tar {
return false;
}
for (final File symbolicLink : visitedSymbolicLinks) {
if (isChildOf(symbolicLink, file)) return true;
if (isChildOf(symbolicLink, file))
return true;
}
return false;
}

View File

@ -95,8 +95,11 @@ public class SymbolicLinkPreservingUntarTask extends DefaultTask {
final void execute() {
// ensure the target extraction path is empty
getProject().delete(extractPath);
try (TarArchiveInputStream tar =
new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(tarFile.getAsFile().get())))) {
try (
TarArchiveInputStream tar = new TarArchiveInputStream(
new GzipCompressorInputStream(new FileInputStream(tarFile.getAsFile().get()))
)
) {
final Path destinationPath = extractPath.get().getAsFile().toPath();
TarArchiveEntry entry = tar.getNextTarEntry();
while (entry != null) {
@ -127,9 +130,10 @@ public class SymbolicLinkPreservingUntarTask extends DefaultTask {
final PosixFileAttributeView view = Files.getFileAttributeView(destination, PosixFileAttributeView.class);
if (view != null) {
final Set<PosixFilePermission> permissions = PosixFilePermissions.fromString(
permissions((entry.getMode() >> 6) & 07) +
permissions((entry.getMode() >> 3) & 07) +
permissions((entry.getMode() >> 0) & 07));
permissions((entry.getMode() >> 6) & 07) + permissions((entry.getMode() >> 3) & 07) + permissions(
(entry.getMode() >> 0) & 07
)
);
Files.setPosixFilePermissions(destination, permissions);
}
}
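The permission string assembled above relies on a `permissions` helper that this hunk does not show. A minimal sketch of what such a helper plausibly looks like, mapping one octal digit of the tar entry mode to an `rwx` triad suitable for `PosixFilePermissions.fromString` (an assumption, not the file's actual implementation):

    private static String permissions(int mode) {
        // one octal digit: bit 4 = read, bit 2 = write, bit 1 = execute
        StringBuilder permissions = new StringBuilder(3);
        permissions.append((mode & 4) == 4 ? 'r' : '-');
        permissions.append((mode & 2) == 2 ? 'w' : '-');
        permissions.append((mode & 1) == 1 ? 'x' : '-');
        return permissions.toString();
    }

Concatenating the three shifted digits of a mode such as 0755 then yields "rwxr-xr-x".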

View File

@ -110,9 +110,9 @@ public class BatsTestTask extends DefaultTask {
List<Object> command = new ArrayList<>();
command.add("bats");
command.add("--tap");
command.addAll(testsDir.getAsFileTree().getFiles().stream()
.filter(f -> f.getName().endsWith(".bats"))
.sorted().collect(Collectors.toList()));
command.addAll(
testsDir.getAsFileTree().getFiles().stream().filter(f -> f.getName().endsWith(".bats")).sorted().collect(Collectors.toList())
);
getProject().exec(spec -> {
spec.setWorkingDir(distributionsDir.getAsFile());
spec.environment(System.getenv());

View File

@ -19,7 +19,24 @@
package org.elasticsearch.gradle.test;
import org.elasticsearch.gradle.BuildPlugin;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
@ -52,28 +69,10 @@ import org.gradle.api.tasks.TaskInputs;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.testing.Test;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath;
import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath;
public class DistroTestPlugin implements Plugin<Project> {
private static final Logger logger = Logging.getLogger(DistroTestPlugin.class);
private static final String GRADLE_JDK_VERSION = "13.0.1+9@cec27d702aa74d5a8630c65ae61e4305";
private static final String GRADLE_JDK_VENDOR = "openjdk";
// all distributions used by distro tests. this is temporary until tests are per distribution
@ -91,7 +90,7 @@ public class DistroTestPlugin implements Plugin<Project> {
final boolean runDockerTests = shouldRunDockerTests(project);
project.getPluginManager().apply(DistributionDownloadPlugin.class);
project.getPluginManager().apply(BuildPlugin.class);
project.getPluginManager().apply("elasticsearch.build");
// TODO: it would be useful to also have the SYSTEM_JAVA_HOME setup in the root project, so that running from GCP only needs
// a java for gradle to run, and the tests are self sufficient and consistent with the java they use
@ -116,11 +115,12 @@ public class DistroTestPlugin implements Plugin<Project> {
Map<String, TaskProvider<?>> batsTests = new HashMap<>();
batsTests.put("bats oss", configureBatsTest(project, "oss", distributionsDir, copyDistributionsTask));
batsTests.put("bats default", configureBatsTest(project, "default", distributionsDir, copyDistributionsTask));
configureBatsTest(project, "plugins",distributionsDir, copyDistributionsTask, copyPluginsTask).configure(t ->
t.setPluginsDir(pluginsDir)
configureBatsTest(project, "plugins", distributionsDir, copyDistributionsTask, copyPluginsTask).configure(
t -> t.setPluginsDir(pluginsDir)
);
configureBatsTest(project, "upgrade", distributionsDir, copyDistributionsTask, copyUpgradeTask).configure(
t -> t.setUpgradeDir(upgradeDir)
);
configureBatsTest(project, "upgrade", distributionsDir, copyDistributionsTask, copyUpgradeTask).configure(t ->
t.setUpgradeDir(upgradeDir));
project.subprojects(vmProject -> {
vmProject.getPluginManager().apply(VagrantBasePlugin.class);
@ -134,8 +134,12 @@ public class DistroTestPlugin implements Plugin<Project> {
Platform platform = distribution.getPlatform();
// this condition ensures windows boxes get windows distributions, and linux boxes get linux distributions
if (isWindows(vmProject) == (platform == Platform.WINDOWS)) {
TaskProvider<GradleDistroTestTask> vmTask =
configureVMWrapperTask(vmProject, distribution.getName() + " distribution", destructiveTaskName, vmDependencies);
TaskProvider<GradleDistroTestTask> vmTask = configureVMWrapperTask(
vmProject,
distribution.getName() + " distribution",
destructiveTaskName,
vmDependencies
);
vmTask.configure(t -> t.dependsOn(distribution));
distroTest.configure(t -> {
@ -168,7 +172,12 @@ public class DistroTestPlugin implements Plugin<Project> {
}
private static Jdk createJdk(
NamedDomainObjectContainer<Jdk> jdksContainer, String name, String vendor, String version, String platform) {
NamedDomainObjectContainer<Jdk> jdksContainer,
String name,
String vendor,
String version,
String platform
) {
Jdk jdk = jdksContainer.create(name);
jdk.setVendor(vendor);
jdk.setVersion(version);
@ -212,17 +221,14 @@ public class DistroTestPlugin implements Plugin<Project> {
vagrant.vmEnv("PATH", convertPath(project, vagrant, gradleJdk, "/bin:$PATH", "\\bin;$Env:PATH"));
// pass these along to get correct build scans
if (System.getenv("JENKINS_URL") != null) {
Stream.of("JOB_NAME", "JENKINS_URL", "BUILD_NUMBER", "BUILD_URL").forEach(name ->
vagrant.vmEnv(name, System.getenv(name))
);
Stream.of("JOB_NAME", "JENKINS_URL", "BUILD_NUMBER", "BUILD_URL").forEach(name -> vagrant.vmEnv(name, System.getenv(name)));
}
vagrant.setIsWindowsVM(isWindows(project));
return Arrays.asList(gradleJdk);
}
private static Object convertPath(Project project, VagrantExtension vagrant, Jdk jdk,
String additionaLinux, String additionalWindows) {
private static Object convertPath(Project project, VagrantExtension vagrant, Jdk jdk, String additionaLinux, String additionalWindows) {
return new Object() {
@Override
public String toString() {
@ -237,115 +243,116 @@ public class DistroTestPlugin implements Plugin<Project> {
private static TaskProvider<Copy> configureCopyDistributionsTask(Project project, Provider<Directory> distributionsDir) {
// temporary, until we have tasks per distribution
return project.getTasks().register(COPY_DISTRIBUTIONS_TASK, Copy.class,
t -> {
t.into(distributionsDir);
t.from(project.getConfigurations().getByName(DISTRIBUTIONS_CONFIGURATION));
return project.getTasks().register(COPY_DISTRIBUTIONS_TASK, Copy.class, t -> {
t.into(distributionsDir);
t.from(project.getConfigurations().getByName(DISTRIBUTIONS_CONFIGURATION));
Path distributionsPath = distributionsDir.get().getAsFile().toPath();
TaskInputs inputs = t.getInputs();
inputs.property("version", VersionProperties.getElasticsearch());
t.doLast(action -> {
try {
Files.writeString(distributionsPath.resolve("version"), VersionProperties.getElasticsearch());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
Path distributionsPath = distributionsDir.get().getAsFile().toPath();
TaskInputs inputs = t.getInputs();
inputs.property("version", VersionProperties.getElasticsearch());
t.doLast(action -> {
try {
Files.write(distributionsPath.resolve("version"), VersionProperties.getElasticsearch().getBytes());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
});
}
private static TaskProvider<Copy> configureCopyUpgradeTask(Project project, Version upgradeVersion,
Provider<Directory> upgradeDir) {
private static TaskProvider<Copy> configureCopyUpgradeTask(Project project, Version upgradeVersion, Provider<Directory> upgradeDir) {
// temporary, until we have tasks per distribution
return project.getTasks().register(COPY_UPGRADE_TASK, Copy.class,
t -> {
t.into(upgradeDir);
t.from(project.getConfigurations().getByName(UPGRADE_CONFIGURATION));
return project.getTasks().register(COPY_UPGRADE_TASK, Copy.class, t -> {
t.into(upgradeDir);
t.from(project.getConfigurations().getByName(UPGRADE_CONFIGURATION));
Path upgradePath = upgradeDir.get().getAsFile().toPath();
// write bwc version, and append -SNAPSHOT if it is an unreleased version
ExtraPropertiesExtension extraProperties = project.getExtensions().getByType(ExtraPropertiesExtension.class);
BwcVersions bwcVersions = (BwcVersions) extraProperties.get("bwcVersions");
final String upgradeFromVersion;
if (bwcVersions.unreleasedInfo(upgradeVersion) != null) {
upgradeFromVersion = upgradeVersion.toString() + "-SNAPSHOT";
} else {
upgradeFromVersion = upgradeVersion.toString();
}
TaskInputs inputs = t.getInputs();
inputs.property("upgrade_from_version", upgradeFromVersion);
// TODO: this is serializable, need to think how to represent this as an input
//inputs.property("bwc_versions", bwcVersions);
t.doLast(action -> {
try {
Files.writeString(upgradePath.resolve("version"), VersionProperties.getElasticsearch());
Files.writeString(upgradePath.resolve("upgrade_from_version"), upgradeFromVersion);
Path upgradeMarkerPath = upgradePath.resolve("upgrade_is_oss");
project.delete(upgradeMarkerPath);
// this is always true, but bats tests rely on it. It is just temporary until bats is removed.
if (upgradeVersion.onOrAfter("6.3.0")) {
Files.writeString(upgradeMarkerPath, "");
}
} catch (IOException e) {
throw new UncheckedIOException(e);
// write bwc version, and append -SNAPSHOT if it is an unreleased version
ExtraPropertiesExtension extraProperties = project.getExtensions().getByType(ExtraPropertiesExtension.class);
BwcVersions bwcVersions = (BwcVersions) extraProperties.get("bwcVersions");
final String upgradeFromVersion;
if (bwcVersions.unreleasedInfo(upgradeVersion) != null) {
upgradeFromVersion = upgradeVersion.toString() + "-SNAPSHOT";
} else {
upgradeFromVersion = upgradeVersion.toString();
}
TaskInputs inputs = t.getInputs();
inputs.property("upgrade_from_version", upgradeFromVersion);
// TODO: this is serializable, need to think how to represent this as an input
// inputs.property("bwc_versions", bwcVersions);
t.doLast(action -> {
try {
Files.write(upgradePath.resolve("version"), VersionProperties.getElasticsearch().getBytes());
Files.write(upgradePath.resolve("upgrade_from_version"), upgradeFromVersion.getBytes());
Path upgradeMarkerPath = upgradePath.resolve("upgrade_is_oss");
project.delete(upgradeMarkerPath);
// this is always true, but bats tests rely on it. It is just temporary until bats is removed.
if (upgradeVersion.onOrAfter("6.3.0")) {
Files.write(upgradeMarkerPath, new byte[0]);
}
});
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
});
}
private static TaskProvider<Copy> configureCopyPluginsTask(Project project, Provider<Directory> pluginsDir) {
Configuration pluginsConfiguration = project.getConfigurations().create(PLUGINS_CONFIGURATION);
// temporary, until we have tasks per distribution
return project.getTasks().register(COPY_PLUGINS_TASK, Copy.class,
t -> {
t.into(pluginsDir);
t.from(pluginsConfiguration);
});
return project.getTasks().register(COPY_PLUGINS_TASK, Copy.class, t -> {
t.into(pluginsDir);
t.from(pluginsConfiguration);
});
}
private static TaskProvider<GradleDistroTestTask> configureVMWrapperTask(Project project, String type, String destructiveTaskPath,
List<Object> dependsOn) {
private static TaskProvider<GradleDistroTestTask> configureVMWrapperTask(
Project project,
String type,
String destructiveTaskPath,
List<Object> dependsOn
) {
int taskNameStart = destructiveTaskPath.lastIndexOf(':') + "destructive".length() + 1;
String taskname = destructiveTaskPath.substring(taskNameStart);
taskname = taskname.substring(0, 1).toLowerCase(Locale.ROOT) + taskname.substring(1);
return project.getTasks().register(taskname, GradleDistroTestTask.class,
t -> {
t.setGroup(JavaBasePlugin.VERIFICATION_GROUP);
t.setDescription("Runs " + type + " tests within vagrant");
t.setTaskName(destructiveTaskPath);
t.extraArg("-D'" + IN_VM_SYSPROP + "'");
t.dependsOn(dependsOn);
});
return project.getTasks().register(taskname, GradleDistroTestTask.class, t -> {
t.setGroup(JavaBasePlugin.VERIFICATION_GROUP);
t.setDescription("Runs " + type + " tests within vagrant");
t.setTaskName(destructiveTaskPath);
t.extraArg("-D'" + IN_VM_SYSPROP + "'");
t.dependsOn(dependsOn);
});
}
private static TaskProvider<?> configureDistroTest(Project project, ElasticsearchDistribution distribution) {
return project.getTasks().register(destructiveDistroTestTaskName(distribution), Test.class,
t -> {
t.setMaxParallelForks(1);
t.setWorkingDir(project.getProjectDir());
t.systemProperty(DISTRIBUTION_SYSPROP, distribution.toString());
if (System.getProperty(IN_VM_SYSPROP) == null) {
t.dependsOn(distribution);
}
});
return project.getTasks().register(destructiveDistroTestTaskName(distribution), Test.class, t -> {
t.setMaxParallelForks(1);
t.setWorkingDir(project.getProjectDir());
t.systemProperty(DISTRIBUTION_SYSPROP, distribution.toString());
if (System.getProperty(IN_VM_SYSPROP) == null) {
t.dependsOn(distribution);
}
});
}
private static TaskProvider<BatsTestTask> configureBatsTest(Project project, String type, Provider<Directory> distributionsDir,
Object... deps) {
return project.getTasks().register("destructiveBatsTest." + type, BatsTestTask.class,
t -> {
Directory batsDir = project.getLayout().getProjectDirectory().dir("bats");
t.setTestsDir(batsDir.dir(type));
t.setUtilsDir(batsDir.dir("utils"));
t.setDistributionsDir(distributionsDir);
t.setPackageName("elasticsearch" + (type.equals("oss") ? "-oss" : ""));
if (System.getProperty(IN_VM_SYSPROP) == null) {
t.dependsOn(deps);
}
});
private static TaskProvider<BatsTestTask> configureBatsTest(
Project project,
String type,
Provider<Directory> distributionsDir,
Object... deps
) {
return project.getTasks().register("destructiveBatsTest." + type, BatsTestTask.class, t -> {
Directory batsDir = project.getLayout().getProjectDirectory().dir("bats");
t.setTestsDir(batsDir.dir(type));
t.setUtilsDir(batsDir.dir("utils"));
t.setDistributionsDir(distributionsDir);
t.setPackageName("elasticsearch" + (type.equals("oss") ? "-oss" : ""));
if (System.getProperty(IN_VM_SYSPROP) == null) {
t.dependsOn(deps);
}
});
}
private List<ElasticsearchDistribution> configureDistributions(Project project, Version upgradeVersion, boolean runDockerTests) {
@ -385,8 +392,15 @@ public class DistroTestPlugin implements Plugin<Project> {
for (Platform platform : Arrays.asList(Platform.LINUX, Platform.WINDOWS)) {
for (Flavor flavor : Flavor.values()) {
for (boolean bundledJdk : Arrays.asList(true, false)) {
addDistro(distributions, Type.ARCHIVE, platform, flavor, bundledJdk,
VersionProperties.getElasticsearch(), currentDistros);
addDistro(
distributions,
Type.ARCHIVE,
platform,
flavor,
bundledJdk,
VersionProperties.getElasticsearch(),
currentDistros
);
}
}
}
@ -400,16 +414,23 @@ public class DistroTestPlugin implements Plugin<Project> {
packagingConfig.setExtendsFrom(distroConfigs);
Configuration packagingUpgradeConfig = project.getConfigurations().create(UPGRADE_CONFIGURATION);
List<Configuration> distroUpgradeConfigs = upgradeDistros.stream().map(ElasticsearchDistribution::getConfiguration)
List<Configuration> distroUpgradeConfigs = upgradeDistros.stream()
.map(ElasticsearchDistribution::getConfiguration)
.collect(Collectors.toList());
packagingUpgradeConfig.setExtendsFrom(distroUpgradeConfigs);
return currentDistros;
}
private static void addDistro(NamedDomainObjectContainer<ElasticsearchDistribution> distributions,
Type type, Platform platform, Flavor flavor, boolean bundledJdk, String version,
List<ElasticsearchDistribution> container) {
private static void addDistro(
NamedDomainObjectContainer<ElasticsearchDistribution> distributions,
Type type,
Platform platform,
Flavor flavor,
boolean bundledJdk,
String version,
List<ElasticsearchDistribution> container
) {
String name = distroId(type, platform, flavor, bundledJdk) + "-" + version;
if (distributions.findByName(name) != null) {
@ -438,11 +459,7 @@ public class DistroTestPlugin implements Plugin<Project> {
private static String destructiveDistroTestTaskName(ElasticsearchDistribution distro) {
Type type = distro.getType();
return "destructiveDistroTest." + distroId(
type,
distro.getPlatform(),
distro.getFlavor(),
distro.getBundledJdk());
return "destructiveDistroTest." + distroId(type, distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk());
}
static Map<String, String> parseOsRelease(final List<String> osReleaseLines) {
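`parseOsRelease` consumes the key=value lines of an `/etc/os-release` file. A minimal sketch of that style of parsing, assuming comments and surrounding quotes are ignored (the method's real body is not shown in this hunk):

    static Map<String, String> parseOsReleaseSketch(List<String> lines) {
        Map<String, String> values = new HashMap<>();
        for (String line : lines) {
            String trimmed = line.trim();
            if (trimmed.isEmpty() || trimmed.startsWith("#")) {
                continue; // skip blanks and comments
            }
            int eq = trimmed.indexOf('=');
            if (eq < 0) {
                continue; // tolerate malformed lines in this sketch
            }
            values.put(trimmed.substring(0, eq), trimmed.substring(eq + 1).replaceAll("^\"|\"$", ""));
        }
        return values;
    }

For example, the line ID="ubuntu" maps to the entry ID -> ubuntu.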
@ -466,7 +483,7 @@ public class DistroTestPlugin implements Plugin<Project> {
private static List<String> getLinuxExclusionList(Project project) {
final String exclusionsFilename = "dockerOnLinuxExclusions";
final Path exclusionsPath = project.getRootDir().toPath().resolve(Path.of(".ci", exclusionsFilename));
final Path exclusionsPath = project.getRootDir().toPath().resolve(".ci").resolve(exclusionsFilename);
try {
return Files.readAllLines(exclusionsPath)
@ -486,7 +503,6 @@ public class DistroTestPlugin implements Plugin<Project> {
* method determines whether the Docker tests should be run on the host
* OS. Essentially, unless an OS and version is specifically excluded, we expect
* to be able to run Docker and test the Docker images.
* @param project
*/
private static boolean shouldRunDockerTests(Project project) {
switch (OS.current()) {

View File

@ -193,12 +193,14 @@ public class ErrorReportingTestListener implements TestOutputListener, TestListe
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Descriptor that = (Descriptor) o;
return Objects.equals(name, that.name) &&
Objects.equals(className, that.className) &&
Objects.equals(parent, that.parent);
return Objects.equals(name, that.name) && Objects.equals(className, that.className) && Objects.equals(parent, that.parent);
}
@Override

View File

@ -62,22 +62,16 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final Project project;
private final ReaperService reaper;
private int nodeIndex = 0;
public ElasticsearchCluster(String path, String clusterName, Project project,
ReaperService reaper, File workingDirBase) {
public ElasticsearchCluster(String path, String clusterName, Project project, ReaperService reaper, File workingDirBase) {
this.path = path;
this.clusterName = clusterName;
this.project = project;
this.reaper = reaper;
this.workingDirBase = workingDirBase;
this.nodes = project.container(ElasticsearchNode.class);
this.nodes.add(
new ElasticsearchNode(
path, clusterName + "-0",
project, reaper, workingDirBase
)
);
this.nodes.add(new ElasticsearchNode(path, clusterName + "-0", project, reaper, workingDirBase));
// configure the cluster name eagerly so nodes know about it
this.nodes.all((node) -> node.defaultConfig.put("cluster.name", safeName(clusterName)));
@ -97,10 +91,8 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
);
}
for (int i = nodes.size() ; i < numberOfNodes; i++) {
this.nodes.add(new ElasticsearchNode(
path, clusterName + "-" + i, project, reaper, workingDirBase
));
for (int i = nodes.size(); i < numberOfNodes; i++) {
this.nodes.add(new ElasticsearchNode(path, clusterName + "-" + i, project, reaper, workingDirBase));
}
}
@ -259,17 +251,14 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
@Override
public void start() {
commonNodeConfig();
nodes
.stream()
.filter(node -> {
if (node.getVersion().onOrAfter("6.5.0")) {
return true;
} else {
// We already started it to set seed nodes
return node.equals(nodes.iterator().next()) == false;
}
})
.forEach(ElasticsearchNode::start);
nodes.stream().filter(node -> {
if (node.getVersion().onOrAfter("6.5.0")) {
return true;
} else {
// We already started it to set seed nodes
return node.equals(nodes.iterator().next()) == false;
}
}).forEach(ElasticsearchNode::start);
}
private void commonNodeConfig() {
@ -297,13 +286,12 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
private void commonNodeConfig(ElasticsearchNode node, String nodeNames, ElasticsearchNode firstNode) {
if (node.getVersion().onOrAfter("7.0.0")) {
node.defaultConfig.keySet().stream()
node.defaultConfig.keySet()
.stream()
.filter(name -> name.startsWith("discovery.zen."))
.collect(Collectors.toList())
.forEach(node.defaultConfig::remove);
if (nodeNames != null &&
node.settings.getOrDefault("discovery.type", "anything").equals("single-node") == false
) {
if (nodeNames != null && node.settings.getOrDefault("discovery.type", "anything").equals("single-node") == false) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
}
node.defaultConfig.put("discovery.seed_providers", "file");
@ -447,9 +435,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
public ElasticsearchNode singleNode() {
if (nodes.size() != 1) {
throw new IllegalStateException(
"Can't treat " + this + " as single node as it has " + nodes.size() + " nodes"
);
throw new IllegalStateException("Can't treat " + this + " as single node as it has " + nodes.size() + " nodes");
}
return getFirstNode();
}
@ -490,11 +476,14 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ElasticsearchCluster that = (ElasticsearchCluster) o;
return Objects.equals(clusterName, that.clusterName) &&
Objects.equals(path, that.path);
return Objects.equals(clusterName, that.clusterName) && Objects.equals(path, that.path);
}
@Override

View File

@ -424,26 +424,20 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (plugins.isEmpty() == false) {
logToProcessStdout("Installing " + plugins.size() + " plugins");
plugins.forEach(plugin -> runElasticsearchBinScript(
"elasticsearch-plugin",
"install", "--batch", plugin.toString())
);
plugins.forEach(plugin -> runElasticsearchBinScript("elasticsearch-plugin", "install", "--batch", plugin.toString()));
}
if (getVersion().before("6.3.0") && testDistribution == TestDistribution.DEFAULT) {
LOGGER.info("emulating the {} flavor for {} by installing x-pack", testDistribution, getVersion());
runElasticsearchBinScript(
"elasticsearch-plugin",
"install", "--batch", "x-pack"
);
runElasticsearchBinScript("elasticsearch-plugin", "install", "--batch", "x-pack");
}
if (keystoreSettings.isEmpty() == false || keystoreFiles.isEmpty() == false) {
logToProcessStdout("Adding " + keystoreSettings.size() + " keystore settings and " + keystoreFiles.size() + " keystore files");
runElasticsearchBinScript("elasticsearch-keystore", "create");
keystoreSettings.forEach((key, value) ->
runElasticsearchBinScriptWithInput(value.toString(), "elasticsearch-keystore", "add", "-x", key)
keystoreSettings.forEach(
(key, value) -> runElasticsearchBinScriptWithInput(value.toString(), "elasticsearch-keystore", "add", "-x", key)
);
for (Map.Entry<String, File> entry : keystoreFiles.entrySet()) {
@ -469,12 +463,12 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (credentials.isEmpty() == false) {
logToProcessStdout("Setting up " + credentials.size() + " users");
credentials.forEach(paramMap -> runElasticsearchBinScript(
getVersion().onOrAfter("6.3.0") ? "elasticsearch-users" : "x-pack/users",
paramMap.entrySet().stream()
.flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
.toArray(String[]::new)
));
credentials.forEach(
paramMap -> runElasticsearchBinScript(
getVersion().onOrAfter("6.3.0") ? "elasticsearch-users" : "x-pack/users",
paramMap.entrySet().stream().flatMap(entry -> Stream.of(entry.getKey(), entry.getValue())).toArray(String[]::new)
)
);
}
if (cliSetup.isEmpty() == false) {
@ -497,7 +491,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
Files.write(
esStdoutFile,
("[" + Instant.now().toString() + "] [BUILD] " + message + "\n").getBytes(StandardCharsets.UTF_8),
StandardOpenOption.CREATE, StandardOpenOption.APPEND
StandardOpenOption.CREATE,
StandardOpenOption.APPEND
);
} catch (IOException e) {
throw new UncheckedIOException(e);
@ -530,8 +525,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
extraConfigFiles.forEach((destination, from) -> {
if (Files.exists(from.toPath()) == false) {
throw new TestClustersException("Can't create extra config file from " + from + " for " + this +
" as it does not exist");
throw new TestClustersException("Can't create extra config file from " + from + " for " + this + " as it does not exist");
}
Path dst = configFile.getParent().resolve(destination);
try {
@ -548,9 +542,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (testDistribution == TestDistribution.INTEG_TEST) {
logToProcessStdout("Installing " + modules.size() + "modules");
for (File module : modules) {
Path destination = getDistroDir().resolve("modules").resolve(module.getName().replace(".zip", "")
.replace("-" + getVersion(), "")
.replace("-SNAPSHOT", ""));
Path destination = getDistroDir().resolve("modules")
.resolve(module.getName().replace(".zip", "").replace("-" + getVersion(), "").replace("-SNAPSHOT", ""));
// only install modules that are not already bundled with the integ-test distribution
if (Files.exists(destination) == false) {
@ -567,16 +560,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
}
} else {
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distributions + " distribution already " +
"has them");
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distributions + " distribution already " + "has them");
}
}
@Override
public void extraConfigFile(String destination, File from) {
if (destination.contains("..")) {
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination +
" for " + this);
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination + " for " + this);
}
extraConfigFiles.put(destination, from);
}
@ -584,8 +575,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
public void extraConfigFile(String destination, File from, PropertyNormalization normalization) {
if (destination.contains("..")) {
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination +
" for " + this);
throw new IllegalArgumentException("extra config file destination can't be relative, was " + destination + " for " + this);
}
extraConfigFiles.put(destination, from, normalization);
}
@ -607,37 +597,26 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private void runElasticsearchBinScriptWithInput(String input, String tool, CharSequence... args) {
if (
Files.exists(getDistroDir().resolve("bin").resolve(tool)) == false &&
Files.exists(getDistroDir().resolve("bin").resolve(tool + ".bat")) == false
) {
throw new TestClustersException("Can't run bin script: `" + tool + "` does not exist. " +
"Is this the distribution you expect it to be ?");
if (Files.exists(getDistroDir().resolve("bin").resolve(tool)) == false
&& Files.exists(getDistroDir().resolve("bin").resolve(tool + ".bat")) == false) {
throw new TestClustersException(
"Can't run bin script: `" + tool + "` does not exist. Is this the distribution you expect it to be ?"
);
}
try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
LoggedExec.exec(project, spec -> {
spec.setEnvironment(getESEnvironment());
spec.workingDir(getDistroDir());
spec.executable(
OS.conditionalString()
.onUnix(() -> "./bin/" + tool)
.onWindows(() -> "cmd")
.supply()
);
spec.args(
OS.<List<CharSequence>>conditional()
.onWindows(() -> {
ArrayList<CharSequence> result = new ArrayList<>();
result.add("/c");
result.add("bin\\" + tool + ".bat");
for (CharSequence arg : args) {
result.add(arg);
}
return result;
})
.onUnix(() -> Arrays.asList(args))
.supply()
);
spec.executable(OS.conditionalString().onUnix(() -> "./bin/" + tool).onWindows(() -> "cmd").supply());
spec.args(OS.<List<CharSequence>>conditional().onWindows(() -> {
ArrayList<CharSequence> result = new ArrayList<>();
result.add("/c");
result.add("bin\\" + tool + ".bat");
for (CharSequence arg : args) {
result.add(arg);
}
return result;
}).onUnix(() -> Arrays.asList(args)).supply());
spec.setStandardInput(byteArrayInputStream);
});
@ -652,34 +631,34 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private Map<String, String> getESEnvironment() {
Map<String, String> defaultEnv = new HashMap<>();
if ( getJavaHome() != null) {
if (getJavaHome() != null) {
defaultEnv.put("JAVA_HOME", getJavaHome().getAbsolutePath());
}
defaultEnv.put("ES_PATH_CONF", configFile.getParent().toString());
String systemPropertiesString = "";
if (systemProperties.isEmpty() == false) {
systemPropertiesString = " " + systemProperties.entrySet().stream()
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue())
.collect(Collectors.joining(" "));
systemPropertiesString = " "
+ systemProperties.entrySet()
.stream()
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue())
.collect(Collectors.joining(" "));
}
String jvmArgsString = "";
if (jvmArgs.isEmpty() == false) {
jvmArgsString = " " + jvmArgs.stream()
.peek(argument -> {
if (argument.toString().startsWith("-D")) {
throw new TestClustersException("Invalid jvm argument `" + argument +
"` configure as systemProperty instead for " + this
);
}
})
.collect(Collectors.joining(" "));
jvmArgsString = " " + jvmArgs.stream().peek(argument -> {
if (argument.toString().startsWith("-D")) {
throw new TestClustersException(
"Invalid jvm argument `" + argument + "` configure as systemProperty instead for " + this
);
}
}).collect(Collectors.joining(" "));
}
String heapSize = System.getProperty("tests.heap.size", "512m");
defaultEnv.put("ES_JAVA_OPTS", "-Xms" + heapSize + " -Xmx" + heapSize + " -ea -esa " +
systemPropertiesString + " " +
jvmArgsString + " " +
// Support passing in additional JVM arguments
System.getProperty("tests.jvm.argline", "")
defaultEnv.put(
"ES_JAVA_OPTS",
"-Xms" + heapSize + " -Xmx" + heapSize + " -ea -esa " + systemPropertiesString + " " + jvmArgsString + " " +
// Support passing in additional JVM arguments
System.getProperty("tests.jvm.argline", "")
);
defaultEnv.put("ES_TMPDIR", tmpDir.toString());
// Windows requires this as it defaults to `c:\windows` despite ES_TMPDIR
@ -692,9 +671,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
Set<String> commonKeys = new HashSet<>(environment.keySet());
commonKeys.retainAll(defaultEnv.keySet());
if (commonKeys.isEmpty() == false) {
throw new IllegalStateException(
"testcluster does not allow overwriting the following env vars " + commonKeys + " for " + this
);
throw new IllegalStateException("testcluster does not allow overwriting the following env vars " + commonKeys + " for " + this);
}
environment.forEach((key, value) -> defaultEnv.put(key, value.toString()));
@ -711,7 +688,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
processBuilder.command(command);
processBuilder.directory(workingDir.toFile());
Map<String, String> environment = processBuilder.environment();
// Don't inherit anything from the environment as that would lack reproducibility
environment.clear();
environment.putAll(getESEnvironment());
@ -823,10 +800,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// and in that case the ML processes will be grandchildren of the wrapper.
List<ProcessHandle> children = processHandle.children().collect(Collectors.toList());
try {
logProcessInfo(
"Terminating elasticsearch process" + (forcibly ? " forcibly " : "gracefully") + ":",
processHandle.info()
);
logProcessInfo("Terminating elasticsearch process" + (forcibly ? " forcibly " : "gracefully") + ":", processHandle.info());
if (forcibly) {
processHandle.destroyForcibly();
@ -836,8 +810,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (processHandle.isAlive() == false) {
return;
}
LOGGER.info("process did not terminate after {} {}, stopping it forcefully",
ES_DESTROY_TIMEOUT, ES_DESTROY_TIMEOUT_UNIT);
LOGGER.info("process did not terminate after {} {}, stopping it forcefully", ES_DESTROY_TIMEOUT, ES_DESTROY_TIMEOUT_UNIT);
processHandle.destroyForcibly();
}
@ -851,11 +824,11 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private void logProcessInfo(String prefix, ProcessHandle.Info info) {
LOGGER.info(prefix + " commandLine:`{}` command:`{}` args:`{}`",
info.commandLine().orElse("-"), info.command().orElse("-"),
Arrays.stream(info.arguments().orElse(new String[]{}))
.map(each -> "'" + each + "'")
.collect(Collectors.joining(" "))
LOGGER.info(
prefix + " commandLine:`{}` command:`{}` args:`{}`",
info.commandLine().orElse("-"),
info.command().orElse("-"),
Arrays.stream(info.arguments().orElse(new String[] {})).map(each -> "'" + each + "'").collect(Collectors.joining(" "))
);
}
@ -863,7 +836,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
final Map<String, Integer> errorsAndWarnings = new LinkedHashMap<>();
LinkedList<String> ring = new LinkedList<>();
try (LineNumberReader reader = new LineNumberReader(Files.newBufferedReader(from))) {
for (String line = reader.readLine(); line != null ; line = reader.readLine()) {
for (String line = reader.readLine(); line != null; line = reader.readLine()) {
final String lineToAdd;
if (ring.isEmpty()) {
lineToAdd = line;
@ -873,12 +846,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// check to see if the previous message (possibly combined from multiple lines) was an error or
// warning as we want to show all of them
String previousMessage = normalizeLogLine(ring.getLast());
if (MESSAGES_WE_DONT_CARE_ABOUT.stream().noneMatch(previousMessage::contains) &&
(previousMessage.contains("ERROR") || previousMessage.contains("WARN"))) {
errorsAndWarnings.put(
previousMessage,
errorsAndWarnings.getOrDefault(previousMessage, 0) + 1
);
if (MESSAGES_WE_DONT_CARE_ABOUT.stream().noneMatch(previousMessage::contains)
&& (previousMessage.contains("ERROR") || previousMessage.contains("WARN"))) {
errorsAndWarnings.put(previousMessage, errorsAndWarnings.getOrDefault(previousMessage, 0) + 1);
}
} else {
// We combine multi line log messages to make sure we never break exceptions apart
@ -991,9 +961,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
Files.createLink(destination, source);
} catch (IOException e) {
// Note: this does not work for network drives, e.g. Vagrant
throw new UncheckedIOException(
"Failed to create hard link " + destination + " pointing to " + source, e
);
throw new UncheckedIOException("Failed to create hard link " + destination + " pointing to " + source, e);
}
}
});
@ -1049,21 +1017,17 @@ public class ElasticsearchNode implements TestClusterConfiguration {
);
}
// Make sure no duplicate config keys
settings.keySet().stream()
.filter(OVERRIDABLE_SETTINGS::contains)
.forEach(defaultConfig::remove);
settings.keySet().stream().filter(OVERRIDABLE_SETTINGS::contains).forEach(defaultConfig::remove);
try {
Files.write(
configFile,
Stream.concat(
settings.entrySet().stream(),
defaultConfig.entrySet().stream()
)
Stream.concat(settings.entrySet().stream(), defaultConfig.entrySet().stream())
.map(entry -> entry.getKey() + ": " + entry.getValue())
.collect(Collectors.joining("\n"))
.getBytes(StandardCharsets.UTF_8),
StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE
StandardOpenOption.TRUNCATE_EXISTING,
StandardOpenOption.CREATE
);
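// Illustrative content of the config file written above (setting values are
// assumed, not captured from a real cluster); each map entry becomes one
// "key: value" line:
//
//     cluster.name: my-cluster
//     discovery.seed_providers: file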
final List<Path> configFiles;
@ -1093,9 +1057,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try {
return readPortsFile(transportPortFile);
} catch (IOException e) {
throw new UncheckedIOException(
"Failed to read transport ports file: " + transportPortFile + " for " + this, e
);
throw new UncheckedIOException("Failed to read transport ports file: " + transportPortFile + " for " + this, e);
}
}
@ -1103,9 +1065,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try {
return readPortsFile(httpPortsFile);
} catch (IOException e) {
throw new UncheckedIOException(
"Failed to read http ports file: " + httpPortsFile + " for " + this, e
);
throw new UncheckedIOException("Failed to read http ports file: " + httpPortsFile + " for " + this, e);
}
}
@ -1120,10 +1080,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private List<File> getInstalledFileSet(Action<? super PatternFilterable> filter) {
return Stream.concat(
plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file")).map(File::new),
modules.stream()
)
return Stream.concat(plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file")).map(File::new), modules.stream())
.filter(File::exists)
// TODO: We may be able to simplify this with Gradle 5.6
// https://docs.gradle.org/nightly/release-notes.html#improved-handling-of-zip-archives-on-classpaths
@ -1164,11 +1121,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private Set<File> getDistributionFiles(Action<PatternFilterable> patternFilter) {
Set<File> files = new TreeSet<>();
for (ElasticsearchDistribution distribution : distributions) {
files.addAll(
project.fileTree(Paths.get(distribution.getExtracted().toString()))
.matching(patternFilter)
.getFiles()
);
files.addAll(project.fileTree(Paths.get(distribution.getExtracted().toString())).matching(patternFilter).getFiles());
}
return files;
}
@ -1216,40 +1169,29 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Override
@Internal
public boolean isProcessAlive() {
requireNonNull(
esProcess,
"Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?"
);
requireNonNull(esProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster`?");
return esProcess.isAlive();
}
void waitForAllConditions() {
waitForConditions(
waitConditions,
System.currentTimeMillis(),
NODE_UP_TIMEOUT_UNIT.toMillis(NODE_UP_TIMEOUT) +
// Installing plugins at config time and loading them when nodes start requires additional time we need to
// account for
ADDITIONAL_CONFIG_TIMEOUT_UNIT.toMillis(ADDITIONAL_CONFIG_TIMEOUT *
(
plugins.size() +
keystoreFiles.size() +
keystoreSettings.size() +
credentials.size()
)
),
TimeUnit.MILLISECONDS,
this
);
waitForConditions(waitConditions, System.currentTimeMillis(), NODE_UP_TIMEOUT_UNIT.toMillis(NODE_UP_TIMEOUT) +
// Installing plugins at config time and loading them when nodes start requires additional time we need to
// account for
ADDITIONAL_CONFIG_TIMEOUT_UNIT.toMillis(
ADDITIONAL_CONFIG_TIMEOUT * (plugins.size() + keystoreFiles.size() + keystoreSettings.size() + credentials.size())
), TimeUnit.MILLISECONDS, this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ElasticsearchNode that = (ElasticsearchNode) o;
return Objects.equals(name, that.name) &&
Objects.equals(path, that.path);
return Objects.equals(name, that.name) && Objects.equals(path, that.path);
}
@Override
@ -1282,37 +1224,23 @@ public class ElasticsearchNode implements TestClusterConfiguration {
@Internal
public boolean isHttpSslEnabled() {
return Boolean.valueOf(
settings.getOrDefault("xpack.security.http.ssl.enabled", "false").toString()
);
return Boolean.valueOf(settings.getOrDefault("xpack.security.http.ssl.enabled", "false").toString());
}
void configureHttpWait(WaitForHttpResource wait) {
if (settings.containsKey("xpack.security.http.ssl.certificate_authorities")) {
wait.setCertificateAuthorities(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.certificate_authorities").toString())
.toFile()
getConfigDir().resolve(settings.get("xpack.security.http.ssl.certificate_authorities").toString()).toFile()
);
}
if (settings.containsKey("xpack.security.http.ssl.certificate")) {
wait.setCertificateAuthorities(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.certificate").toString())
.toFile()
);
wait.setCertificateAuthorities(getConfigDir().resolve(settings.get("xpack.security.http.ssl.certificate").toString()).toFile());
}
if (settings.containsKey("xpack.security.http.ssl.keystore.path")) {
wait.setTrustStoreFile(
getConfigDir()
.resolve(settings.get("xpack.security.http.ssl.keystore.path").toString())
.toFile()
);
wait.setTrustStoreFile(getConfigDir().resolve(settings.get("xpack.security.http.ssl.keystore.path").toString()).toFile());
}
if (keystoreSettings.containsKey("xpack.security.http.ssl.keystore.secure_password")) {
wait.setTrustStorePassword(
keystoreSettings.get("xpack.security.http.ssl.keystore.secure_password").toString()
);
wait.setTrustStorePassword(keystoreSettings.get("xpack.security.http.ssl.keystore.secure_password").toString());
}
}

View File

@ -19,17 +19,21 @@ public class RestTestRunnerTask extends Test implements TestClustersAware {
private Collection<ElasticsearchCluster> clusters = new HashSet<>();
public RestTestRunnerTask() {
this.getOutputs().doNotCacheIf("Caching disabled for this task since it uses a cluster shared by other tasks",
/*
* Look for any other tasks which use the same cluster as this task. Since tests often have side effects for the cluster they
* execute against, this state can cause issues when trying to cache tests results of tasks that share a cluster. To avoid any
* undesired behavior we simply disable the cache if we detect that this task uses a cluster shared between multiple tasks.
*/
t -> getProject().getTasks().withType(RestTestRunnerTask.class)
.stream()
.filter(task -> task != this)
.anyMatch(task -> Collections.disjoint(task.getClusters(), getClusters()) == false)
);
this.getOutputs()
.doNotCacheIf(
"Caching disabled for this task since it uses a cluster shared by other tasks",
/*
* Look for any other tasks which use the same cluster as this task. Since tests often have side effects for the cluster
* they execute against, this state can cause issues when trying to cache tests results of tasks that share a cluster. To
* avoid any undesired behavior we simply disable the cache if we detect that this task uses a cluster shared between
* multiple tasks.
*/
t -> getProject().getTasks()
.withType(RestTestRunnerTask.class)
.stream()
.filter(task -> task != this)
.anyMatch(task -> Collections.disjoint(task.getClusters(), getClusters()) == false)
);
}
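// For context, TaskOutputs.doNotCacheIf takes a reason string plus a Spec<Task>;
// a stripped-down sketch of the same pattern (the predicate is illustrative,
// not from this file):
//
//     getOutputs().doNotCacheIf(
//         "shares state with another task",
//         task -> sharesStateWithAnotherTask() // hypothetical predicate
//     );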
@Override

View File

@ -28,10 +28,7 @@ public class RunTask extends DefaultTestClustersTask {
private Path dataDir = null;
@Option(
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
@Option(option = "debug-jvm", description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch.")
public void setDebug(boolean enabled) {
this.debug = enabled;
}
@ -41,10 +38,7 @@ public class RunTask extends DefaultTestClustersTask {
return debug;
}
@Option(
option = "data-dir",
description = "Override the base data directory used by the testcluster"
)
@Option(option = "data-dir", description = "Override the base data directory used by the testcluster")
public void setDataDir(String dataDirStr) {
dataDir = Paths.get(dataDirStr).toAbsolutePath();
}
@ -52,7 +46,9 @@ public class RunTask extends DefaultTestClustersTask {
@Input
@Optional
public String getDataDir() {
if (dataDir == null) { return null;}
if (dataDir == null) {
return null;
}
return dataDir.toString();
}
@ -61,12 +57,16 @@ public class RunTask extends DefaultTestClustersTask {
int debugPort = 5005;
int httpPort = 9200;
int transportPort = 9300;
Map<String, String> additionalSettings = System.getProperties().entrySet().stream()
Map<String, String> additionalSettings = System.getProperties()
.entrySet()
.stream()
.filter(entry -> entry.getKey().toString().startsWith(CUSTOM_SETTINGS_PREFIX))
.collect(Collectors.toMap(
entry -> entry.getKey().toString().substring(CUSTOM_SETTINGS_PREFIX.length()),
entry -> entry.getValue().toString()
));
.collect(
Collectors.toMap(
entry -> entry.getKey().toString().substring(CUSTOM_SETTINGS_PREFIX.length()),
entry -> entry.getValue().toString()
)
);
boolean singleNode = getClusters().stream().flatMap(c -> c.getNodes().stream()).count() == 1;
final Function<ElasticsearchNode, Path> getDataPath;
if (singleNode) {
@ -86,10 +86,7 @@ public class RunTask extends DefaultTestClustersTask {
node.setDataPath(getDataPath.apply(node));
}
if (debug) {
logger.lifecycle(
"Running elasticsearch in debug mode, {} suspending until connected on debugPort {}",
node, debugPort
);
logger.lifecycle("Running elasticsearch in debug mode, {} suspending until connected on debugPort {}", node, debugPort);
node.jvmArgs("-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=" + debugPort);
debugPort += 1;
}
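The additionalSettings collection above uses a common prefix-filtering idiom over JVM system properties. Reduced to a standalone sketch (class name and prefix are illustrative):

import java.util.Map;
import java.util.stream.Collectors;

public class PrefixedProperties {

    /** Collects -D<prefix><key>=<value> system properties into a key-to-value map. */
    public static Map<String, String> collect(String prefix) {
        return System.getProperties()
            .entrySet()
            .stream()
            .filter(entry -> entry.getKey().toString().startsWith(prefix))
            .collect(
                Collectors.toMap(
                    entry -> entry.getKey().toString().substring(prefix.length()),
                    entry -> entry.getValue().toString()
                )
            );
    }
}

Running the JVM with -Dtests.es.foo=bar and calling collect("tests.es.") would yield a map containing foo=bar.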

View File

@ -33,7 +33,6 @@ import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
public interface TestClusterConfiguration {
void setVersion(String version);
@ -111,7 +110,8 @@ public interface TestClusterConfiguration {
default void waitForConditions(
LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions,
long startedAtMillis,
long nodeUpTimeout, TimeUnit nodeUpTimeoutUnit,
long nodeUpTimeout,
TimeUnit nodeUpTimeoutUnit,
TestClusterConfiguration context
) {
Logger logger = Logging.getLogger(TestClusterConfiguration.class);
@ -119,17 +119,13 @@ public interface TestClusterConfiguration {
long thisConditionStartedAt = System.currentTimeMillis();
boolean conditionMet = false;
Throwable lastException = null;
while (
System.currentTimeMillis() - startedAtMillis < TimeUnit.MILLISECONDS.convert(nodeUpTimeout, nodeUpTimeoutUnit)
) {
while (System.currentTimeMillis() - startedAtMillis < TimeUnit.MILLISECONDS.convert(nodeUpTimeout, nodeUpTimeoutUnit)) {
if (context.isProcessAlive() == false) {
throw new TestClustersException(
"process was found dead while waiting for " + description + ", " + this
);
throw new TestClustersException("process was found dead while waiting for " + description + ", " + this);
}
try {
if(predicate.test(context)) {
if (predicate.test(context)) {
conditionMet = true;
break;
}
@ -140,8 +136,13 @@ public interface TestClusterConfiguration {
}
}
if (conditionMet == false) {
String message = "`" + context + "` failed to wait for " + description + " after " +
nodeUpTimeout + " " + nodeUpTimeoutUnit;
String message = String.format(
"`%s` failed to wait for %s after %d %s",
context,
description,
nodeUpTimeout,
nodeUpTimeoutUnit
);
if (lastException == null) {
throw new TestClustersException(message);
} else {
@ -158,18 +159,12 @@ public interface TestClusterConfiguration {
throw new TestClustersException(message + extraCause, lastException);
}
}
logger.info(
"{}: {} took {} seconds",
this, description,
(System.currentTimeMillis() - thisConditionStartedAt) / 1000.0
);
logger.info("{}: {} took {} seconds", this, description, (System.currentTimeMillis() - thisConditionStartedAt) / 1000.0);
});
}
default String safeName(String name) {
return name
.replaceAll("^[^a-zA-Z0-9]+", "")
.replaceAll("[^a-zA-Z0-9\\.]+", "-");
return name.replaceAll("^[^a-zA-Z0-9]+", "").replaceAll("[^a-zA-Z0-9\\.]+", "-");
}
boolean isProcessAlive();
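waitForConditions above is a deadline-based polling loop: it repeatedly tests a predicate, records the last failure, and gives up when the overall timeout elapses. Its general shape as a standalone sketch (poll interval and names are illustrative):

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

public final class WaitFor {

    /** Polls the condition until it holds or the timeout elapses; returns whether it held. */
    public static boolean waitUntil(BooleanSupplier condition, long timeout, TimeUnit unit) throws InterruptedException {
        long deadline = System.currentTimeMillis() + unit.toMillis(timeout);
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(100); // brief back-off between polls
        }
        return false;
    }
}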

View File

@ -12,19 +12,13 @@ interface TestClustersAware extends Task {
default void useCluster(ElasticsearchCluster cluster) {
if (cluster.getPath().equals(getProject().getPath()) == false) {
throw new TestClustersException(
"Task " + getPath() + " can't use test cluster from" +
" another project " + cluster
);
throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster);
}
cluster.getNodes().stream().flatMap(node -> node.getDistributions().stream()).forEach(distro ->
dependsOn(distro.getExtracted())
);
cluster.getNodes().stream().flatMap(node -> node.getDistributions().stream()).forEach(distro -> dependsOn(distro.getExtracted()));
getClusters().add(cluster);
}
default void beforeStart() {
}
default void beforeStart() {}
}

View File

@ -40,7 +40,7 @@ public class TestClustersPlugin implements Plugin<Project> {
public static final String EXTENSION_NAME = "testClusters";
private static final String REGISTRY_EXTENSION_NAME = "testClustersRegistry";
private static final Logger logger = Logging.getLogger(TestClustersPlugin.class);
private ReaperService reaper;
@ -58,7 +58,8 @@ public class TestClustersPlugin implements Plugin<Project> {
createListClustersTask(project, container);
if (project.getRootProject().getExtensions().findByName(REGISTRY_EXTENSION_NAME) == null) {
TestClustersRegistry registry = project.getRootProject().getExtensions()
TestClustersRegistry registry = project.getRootProject()
.getExtensions()
.create(REGISTRY_EXTENSION_NAME, TestClustersRegistry.class);
// When we know what tasks will run, we claim the clusters of those task to differentiate between clusters
@ -79,27 +80,18 @@ public class TestClustersPlugin implements Plugin<Project> {
// Create an extensions that allows describing clusters
NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
ElasticsearchCluster.class,
name -> new ElasticsearchCluster(
project.getPath(),
name,
project,
reaper,
new File(project.getBuildDir(), "testclusters")
)
name -> new ElasticsearchCluster(project.getPath(), name, project, reaper, new File(project.getBuildDir(), "testclusters"))
);
project.getExtensions().add(EXTENSION_NAME, container);
return container;
}
private void createListClustersTask(Project project, NamedDomainObjectContainer<ElasticsearchCluster> container) {
Task listTask = project.getTasks().create(LIST_TASK_NAME);
listTask.setGroup("ES cluster formation");
listTask.setDescription("Lists all ES clusters configured for this project");
listTask.doLast((Task task) ->
container.forEach(cluster ->
logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getNumberOfNodes())
)
listTask.doLast(
(Task task) -> container.forEach(cluster -> logger.lifecycle(" * {}: {}", cluster.getName(), cluster.getNumberOfNodes()))
);
}
@ -107,7 +99,8 @@ public class TestClustersPlugin implements Plugin<Project> {
// Once we know all the tasks that need to execute, we claim all the clusters that belong to those and count the
// claims so we'll know when it's safe to stop them.
gradle.getTaskGraph().whenReady(taskExecutionGraph -> {
taskExecutionGraph.getAllTasks().stream()
taskExecutionGraph.getAllTasks()
.stream()
.filter(task -> task instanceof TestClustersAware)
.map(task -> (TestClustersAware) task)
.flatMap(task -> task.getClusters().stream())
@ -116,42 +109,38 @@ public class TestClustersPlugin implements Plugin<Project> {
}
private static void configureStartClustersHook(Gradle gradle, TestClustersRegistry registry) {
gradle.addListener(
new TaskActionListener() {
@Override
public void beforeActions(Task task) {
if (task instanceof TestClustersAware == false) {
return;
}
// we only start the cluster before the actions, so we'll not start it if the task is up-to-date
TestClustersAware awareTask = (TestClustersAware) task;
awareTask.beforeStart();
awareTask.getClusters().forEach(registry::maybeStartCluster);
gradle.addListener(new TaskActionListener() {
@Override
public void beforeActions(Task task) {
if (task instanceof TestClustersAware == false) {
return;
}
@Override
public void afterActions(Task task) {}
// we only start the cluster before the actions, so we'll not start it if the task is up-to-date
TestClustersAware awareTask = (TestClustersAware) task;
awareTask.beforeStart();
awareTask.getClusters().forEach(registry::maybeStartCluster);
}
);
@Override
public void afterActions(Task task) {}
});
}
private static void configureStopClustersHook(Gradle gradle, TestClustersRegistry registry) {
gradle.addListener(
new TaskExecutionListener() {
@Override
public void afterExecute(Task task, TaskState state) {
if (task instanceof TestClustersAware == false) {
return;
}
// always unclaim the cluster, even if _this_ task is up-to-date, as others might not have been
// and caused the cluster to start.
((TestClustersAware) task).getClusters()
.forEach(cluster -> registry.stopCluster(cluster, state.getFailure() != null));
gradle.addListener(new TaskExecutionListener() {
@Override
public void afterExecute(Task task, TaskState state) {
if (task instanceof TestClustersAware == false) {
return;
}
@Override
public void beforeExecute(Task task) {}
// always unclaim the cluster, even if _this_ task is up-to-date, as others might not have been
// and caused the cluster to start.
((TestClustersAware) task).getClusters().forEach(cluster -> registry.stopCluster(cluster, state.getFailure() != null));
}
);
@Override
public void beforeExecute(Task task) {}
});
}
}
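The two hooks above lean on Gradle's global listener interfaces. Per the comments in the code, TaskActionListener.beforeActions only fires when a task actually runs its actions (so up-to-date tasks never start a cluster), while TaskExecutionListener.afterExecute fires even for up-to-date tasks (so claims are always released). A condensed sketch of the stop-side hook, with the listener body simplified:

import org.gradle.api.Task;
import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.invocation.Gradle;
import org.gradle.api.tasks.TaskState;

public class StopHook {

    public static void install(Gradle gradle) {
        gradle.addListener(new TaskExecutionListener() {
            @Override
            public void beforeExecute(Task task) {}

            @Override
            public void afterExecute(Task task, TaskState state) {
                // Runs for every executed task, including up-to-date ones;
                // state.getFailure() is non-null when the task failed.
            }
        });
    }
}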

View File

@ -9,7 +9,7 @@ import java.util.Map;
import java.util.Set;
public class TestClustersRegistry {
private static final Logger logger = Logging.getLogger(TestClustersRegistry.class);
private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure";
private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false"));
private final Map<ElasticsearchCluster, Integer> claimsInventory = new HashMap<>();
@ -35,10 +35,10 @@ public class TestClustersRegistry {
if (allowClusterToSurvive) {
logger.info("Not stopping clusters, disabled by property");
// task failed or this is the last one to stop
for (int i = 1; ; i += i) {
for (int i = 1;; i += i) {
logger.lifecycle(
"No more test clusters left to run, going to sleep because {} was set," +
" interrupt (^C) to stop clusters.", TESTCLUSTERS_INSPECT_FAILURE
"No more test clusters left to run, going to sleep because {} was set," + " interrupt (^C) to stop clusters.",
TESTCLUSTERS_INSPECT_FAILURE
);
try {
Thread.sleep(1000 * i);
@ -52,7 +52,7 @@ public class TestClustersRegistry {
runningClusters.remove(cluster);
}
} else {
int currentClaims = claimsInventory.getOrDefault(cluster, 0) - 1;
claimsInventory.put(cluster, currentClaims);
if (currentClaims <= 0 && runningClusters.contains(cluster)) {
@ -62,5 +62,4 @@ public class TestClustersRegistry {
}
}
}
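The claims bookkeeping above is plain reference counting: each task using a cluster takes a claim, and the cluster is only stopped once the final claim is released. The same pattern in a generic sketch:

import java.util.HashMap;
import java.util.Map;

public class ClaimCounter<T> {

    private final Map<T, Integer> claims = new HashMap<>();

    public void claim(T resource) {
        claims.merge(resource, 1, Integer::sum);
    }

    /** Returns true when the last claim was released and the resource may be stopped. */
    public boolean release(T resource) {
        int remaining = claims.merge(resource, -1, Integer::sum);
        return remaining <= 0;
    }
}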

View File

@ -53,11 +53,16 @@ public class TestFixtureExtension {
Optional<String> otherProject = this.findOtherProjectUsingService(key);
if (otherProject.isPresent()) {
throw new GradleException(
"Projects " + otherProject.get() + " and " + this.project.getPath() + " both claim the "+ serviceName +
" service defined in the docker-compose.yml of " + path + "This is not supported because it breaks " +
"running in parallel. Configure dedicated services for each project and use those instead."
String exceptionMessage = String.format(
"Projects %s and %s both claim the %s service defined in the docker-compose.yml of %s. This is not supported because it "
+ "breaks running in parallel. Configure dedicated services for each project and use those instead.",
otherProject.get(),
this.project.getPath(),
serviceName,
path
);
throw new GradleException(exceptionMessage);
}
}
@ -66,7 +71,9 @@ public class TestFixtureExtension {
}
private Optional<String> findOtherProjectUsingService(String serviceName) {
return this.project.getRootProject().getAllprojects().stream()
return this.project.getRootProject()
.getAllprojects()
.stream()
.filter(p -> p.equals(this.project) == false)
.filter(p -> p.getExtensions().findByType(TestFixtureExtension.class) != null)
.map(project -> project.getExtensions().getByType(TestFixtureExtension.class))
@ -90,10 +97,15 @@ public class TestFixtureExtension {
// Check for exclusive access
Optional<String> otherProject = this.findOtherProjectUsingService(path);
if (otherProject.isPresent()) {
throw new GradleException("Projects " + otherProject.get() + " and " + this.project.getPath() + " both " +
"claim all services from " + path + ". This is not supported because it breaks running in parallel. " +
"Configure specific services in docker-compose.yml for each and add the service name to `useFixture`"
final String exceptionMessage = String.format(
"Projects %s and %s both claim all services from %s. This is not supported because it breaks running in parallel. "
+ "Configure specific services in docker-compose.yml for each and add the service name to `useFixture`",
otherProject.get(),
this.project.getPath(),
path
);
throw new GradleException(exceptionMessage);
}
}

View File

@ -50,9 +50,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
public void apply(Project project) {
TaskContainer tasks = project.getTasks();
TestFixtureExtension extension = project.getExtensions().create(
"testFixtures", TestFixtureExtension.class, project
);
TestFixtureExtension extension = project.getExtensions().create("testFixtures", TestFixtureExtension.class, project);
ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class);
File testfixturesDir = project.file("testfixtures_shared");
@ -89,38 +87,34 @@ public class TestFixturesPlugin implements Plugin<Project> {
composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML));
composeExtension.setRemoveContainers(true);
composeExtension.setExecutable(
project.file("/usr/local/bin/docker-compose").exists() ?
"/usr/local/bin/docker-compose" : "/usr/bin/docker-compose"
project.file("/usr/local/bin/docker-compose").exists() ? "/usr/local/bin/docker-compose" : "/usr/bin/docker-compose"
);
buildFixture.dependsOn(tasks.getByName("composeUp"));
pullFixture.dependsOn(tasks.getByName("composePull"));
tasks.getByName("composeUp").mustRunAfter(preProcessFixture);
tasks.getByName("composePull").mustRunAfter(preProcessFixture);
tasks.getByName("composeDown").doLast((task) -> {
project.delete(testfixturesDir);
});
tasks.getByName("composeDown").doLast((task) -> { project.delete(testfixturesDir); });
configureServiceInfoForTask(
postProcessFixture,
project,
false,
(name, port) -> postProcessFixture.getExtensions()
.getByType(ExtraPropertiesExtension.class).set(name, port)
(name, port) -> postProcessFixture.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port)
);
}
} else {
project.afterEvaluate(spec -> {
if (extension.fixtures.isEmpty()) {
// if only one fixture is used, that's this one, but without a compose file that's not a valid configuration
throw new IllegalStateException("No " + DOCKER_COMPOSE_YML + " found for " + project.getPath() +
" nor does it use other fixtures.");
throw new IllegalStateException(
"No " + DOCKER_COMPOSE_YML + " found for " + project.getPath() + " nor does it use other fixtures."
);
}
});
}
extension.fixtures
.matching(fixtureProject -> fixtureProject.equals(project) == false)
extension.fixtures.matching(fixtureProject -> fixtureProject.equals(project) == false)
.all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath()));
conditionTaskByType(tasks, extension, Test.class);
@ -129,89 +123,81 @@ public class TestFixturesPlugin implements Plugin<Project> {
conditionTaskByType(tasks, extension, ComposeUp.class);
if (dockerComposeSupported() == false) {
project.getLogger().warn(
"Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose " +
"but none could be found so these will be skipped", project.getPath()
);
project.getLogger()
.warn(
"Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose "
+ "but none could be found so these will be skipped",
project.getPath()
);
return;
}
tasks.withType(Test.class, task ->
extension.fixtures.all(fixtureProject -> {
fixtureProject.getTasks().matching(it -> it.getName().equals("buildFixture")).all(task::dependsOn);
fixtureProject.getTasks().matching(it -> it.getName().equals("composeDown")).all(task::finalizedBy);
configureServiceInfoForTask(
task,
fixtureProject,
true,
(name, host) ->
task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host)
);
task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture"));
})
);
tasks.withType(Test.class, task -> extension.fixtures.all(fixtureProject -> {
fixtureProject.getTasks().matching(it -> it.getName().equals("buildFixture")).all(task::dependsOn);
fixtureProject.getTasks().matching(it -> it.getName().equals("composeDown")).all(task::finalizedBy);
configureServiceInfoForTask(
task,
fixtureProject,
true,
(name, host) -> task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host)
);
task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture"));
}));
}
private void conditionTaskByType(TaskContainer tasks, TestFixtureExtension extension, Class<? extends DefaultTask> taskClass) {
tasks.withType(
taskClass,
task -> task.onlyIf(spec ->
extension.fixtures.stream()
.anyMatch(fixtureProject ->
fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false
) == false
task -> task.onlyIf(
spec -> extension.fixtures.stream()
.anyMatch(fixtureProject -> fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false) == false
)
);
}
private void configureServiceInfoForTask(
Task task, Project fixtureProject, boolean enableFilter, BiConsumer<String, Integer> consumer
Task task,
Project fixtureProject,
boolean enableFilter,
BiConsumer<String, Integer> consumer
) {
// Configure ports for the tests as system properties.
// We only know these at execution time so we need to do it in doFirst
TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class);
task.doFirst(new Action<Task>() {
@Override
public void execute(Task theTask) {
fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos()
.entrySet().stream()
.filter(entry -> enableFilter == false ||
extension.isServiceRequired(entry.getKey(), fixtureProject.getPath())
)
.forEach(entry -> {
String service = entry.getKey();
ServiceInfo infos = entry.getValue();
infos.getTcpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".tcp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(
name,
host
);
});
infos.getUdpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".udp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(
name,
host
);
});
});
}
}
);
@Override
public void execute(Task theTask) {
fixtureProject.getExtensions()
.getByType(ComposeExtension.class)
.getServicesInfos()
.entrySet()
.stream()
.filter(entry -> enableFilter == false || extension.isServiceRequired(entry.getKey(), fixtureProject.getPath()))
.forEach(entry -> {
String service = entry.getKey();
ServiceInfo infos = entry.getValue();
infos.getTcpPorts().forEach((container, host) -> {
String name = "test.fixtures." + service + ".tcp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(name, host);
});
infos.getUdpPorts().forEach((container, host) -> {
String name = "test.fixtures." + service + ".udp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(name, host);
});
});
}
});
}
public static boolean dockerComposeSupported() {
if (OS.current().equals(OS.WINDOWS)) {
return false;
}
final boolean hasDockerCompose = (new File("/usr/local/bin/docker-compose")).exists() ||
(new File("/usr/bin/docker-compose").exists());
final boolean hasDockerCompose = (new File("/usr/local/bin/docker-compose")).exists()
|| (new File("/usr/bin/docker-compose").exists());
return hasDockerCompose && Boolean.parseBoolean(System.getProperty("tests.fixture.enabled", "true"));
}
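Both this check and DockerUtils below locate external tooling by probing a fixed list of candidate paths. The idiom in isolation (sketch):

import java.io.File;
import java.util.List;
import java.util.Optional;

public class BinaryLocator {

    /** Returns the first candidate path that exists on disk, if any. */
    public static Optional<String> findFirstExisting(List<String> candidates) {
        return candidates.stream().filter(path -> new File(path).exists()).findFirst();
    }
}

For instance, findFirstExisting(List.of("/usr/local/bin/docker-compose", "/usr/bin/docker-compose")) resolves the same two locations probed above.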

View File

@ -38,28 +38,25 @@ public abstract class Boilerplate {
}
public static <T> T maybeCreate(NamedDomainObjectContainer<T> collection, String name) {
return Optional.ofNullable(collection.findByName(name))
.orElse(collection.create(name));
return Optional.ofNullable(collection.findByName(name)).orElse(collection.create(name));
}
public static <T> T maybeCreate(NamedDomainObjectContainer<T> collection, String name, Action<T> action) {
return Optional.ofNullable(collection.findByName(name))
.orElseGet(() -> {
T result = collection.create(name);
action.execute(result);
return result;
});
return Optional.ofNullable(collection.findByName(name)).orElseGet(() -> {
T result = collection.create(name);
action.execute(result);
return result;
});
}
public static <T> T maybeCreate(PolymorphicDomainObjectContainer<T> collection, String name, Class<T> type, Action<T> action) {
return Optional.ofNullable(collection.findByName(name))
.orElseGet(() -> {
T result = collection.create(name, type);
action.execute(result);
return result;
});
return Optional.ofNullable(collection.findByName(name)).orElseGet(() -> {
T result = collection.create(name, type);
action.execute(result);
return result;
});
}
@ -83,7 +80,8 @@ public abstract class Boilerplate {
}
public static <T extends Task> void maybeConfigure(
TaskContainer tasks, String name,
TaskContainer tasks,
String name,
Class<? extends T> type,
Action<? super T> config
) {
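The maybeCreate overloads above implement find-or-create over Gradle's domain object containers, which expose findByName/create rather than a computeIfAbsent-style API. The same idea against a plain Map, for comparison (sketch):

import java.util.Map;
import java.util.function.Function;

public class FindOrCreate {

    /** Returns the existing entry for name, creating one via the factory if absent. */
    public static <T> T maybeCreate(Map<String, T> registry, String name, Function<String, T> factory) {
        return registry.computeIfAbsent(name, factory);
    }
}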

View File

@ -5,8 +5,7 @@ import org.gradle.api.plugins.ExtraPropertiesExtension;
public class ClasspathUtils {
private ClasspathUtils() {
}
private ClasspathUtils() {}
/**
* Determine if we are running in the context of the `elastic/elasticsearch` project. This method will return {@code false} when

View File

@ -31,10 +31,7 @@ public class DockerUtils {
*/
public static Optional<String> getDockerPath() {
// Check if the Docker binary exists
return List.of(DOCKER_BINARIES)
.stream()
.filter(path -> new File(path).exists())
.findFirst();
return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
}
/**
@ -144,7 +141,8 @@ public class DockerUtils {
"Docker (checked [%s]) is required to run the following task%s: \n%s",
String.join(", ", DOCKER_BINARIES),
tasks.size() > 1 ? "s" : "",
String.join("\n", tasks));
String.join("\n", tasks)
);
throwDockerRequiredException(message);
}
@ -153,7 +151,8 @@ public class DockerUtils {
Locale.ROOT,
"Docker is required to run the following task%s, but it doesn't appear to be running: \n%s",
tasks.size() > 1 ? "s" : "",
String.join("\n", tasks));
String.join("\n", tasks)
);
throwDockerRequiredException(message);
}
@ -161,20 +160,22 @@ public class DockerUtils {
final String message = String.format(
Locale.ROOT,
"building Docker images requires Docker version 17.05+ due to use of multi-stage builds yet was [%s]",
availability.version);
availability.version
);
throwDockerRequiredException(message);
}
// Some other problem, print the error
final String message = String.format(
Locale.ROOT,
"a problem occurred running Docker from [%s] yet it is required to run the following task%s: \n%s\n" +
"the problem is that Docker exited with exit code [%d] with standard error output [%s]",
"a problem occurred running Docker from [%s] yet it is required to run the following task%s: \n%s\n"
+ "the problem is that Docker exited with exit code [%d] with standard error output [%s]",
availability.path,
tasks.size() > 1 ? "s" : "",
String.join("\n", tasks),
availability.lastCommand.exitCode,
availability.lastCommand.stderr.trim());
availability.lastCommand.stderr.trim()
);
throwDockerRequiredException(message);
}
@ -184,9 +185,12 @@ public class DockerUtils {
private static void throwDockerRequiredException(final String message, Exception e) {
throw new GradleException(
message + "\nyou can address this by attending to the reported issue, "
message
+ "\nyou can address this by attending to the reported issue, "
+ "removing the offending tasks from being executed, "
+ "or by passing -Dbuild.docker=false", e);
+ "or by passing -Dbuild.docker=false",
e
);
}
/**

View File

@ -41,8 +41,9 @@ import java.util.regex.Pattern;
*/
public class BatsProgressLogger implements UnaryOperator<String> {
private static final Pattern lineRegex =
Pattern.compile("(?<status>ok|not ok) \\d+(?<skip> # skip (?<skipReason>\\(.+\\))?)? \\[(?<suite>.+)\\] (?<test>.+)");
private static final Pattern lineRegex = Pattern.compile(
"(?<status>ok|not ok) \\d+(?<skip> # skip (?<skipReason>\\(.+\\))?)? \\[(?<suite>.+)\\] (?<test>.+)"
);
private static final Pattern startRegex = Pattern.compile("1..(\\d+)");
private final Logger logger;
@ -67,7 +68,7 @@ public class BatsProgressLogger implements UnaryOperator<String> {
testCount = Integer.parseInt(m.group(1));
int length = String.valueOf(testCount).length();
String count = "%0" + length + "d";
countsFormat = "[" + count +"|" + count + "|" + count + "/" + count + "]";
countsFormat = "[" + count + "|" + count + "|" + count + "/" + count + "]";
return null;
}
Matcher m = lineRegex.matcher(line);

View File

@ -49,14 +49,17 @@ public class VagrantBasePlugin implements Plugin<Project> {
VagrantExtension extension = project.getExtensions().create("vagrant", VagrantExtension.class, project);
VagrantMachine service = project.getExtensions().create("vagrantService", VagrantMachine.class, project, extension, reaper);
project.getGradle().getTaskGraph().whenReady(graph ->
service.refs = graph.getAllTasks().stream()
.filter(t -> t instanceof VagrantShellTask)
.filter(t -> t.getProject() == project)
.count());
project.getGradle()
.getTaskGraph()
.whenReady(
graph -> service.refs = graph.getAllTasks()
.stream()
.filter(t -> t instanceof VagrantShellTask)
.filter(t -> t.getProject() == project)
.count()
);
}
/**
* Check vagrant and virtualbox versions, if any vagrant test tasks will be run.
*/
@ -89,8 +92,9 @@ public class VagrantBasePlugin implements Plugin<Project> {
String output = pipe.toString(StandardCharsets.UTF_8).trim();
Matcher matcher = versionRegex.matcher(output);
if (matcher.find() == false) {
throw new IllegalStateException(tool +
" version output [" + output + "] did not match regex [" + versionRegex.pattern() + "]");
throw new IllegalStateException(
tool + " version output [" + output + "] did not match regex [" + versionRegex.pattern() + "]"
);
}
String version = matcher.group(1);
@ -100,8 +104,14 @@ public class VagrantBasePlugin implements Plugin<Project> {
if (found > minVersion[i]) {
break; // most significant version is good
} else if (found < minVersion[i]) {
throw new IllegalStateException("Unsupported version of " + tool + ". Found [" + version + "], expected [" +
Stream.of(minVersion).map(String::valueOf).collect(Collectors.joining(".")) + "+]");
String exceptionMessage = String.format(
"Unsupported version of %s. Found [%s], expected [%s+]",
tool,
version,
Stream.of(minVersion).map(String::valueOf).collect(Collectors.joining("."))
);
throw new IllegalStateException(exceptionMessage);
} // else equal, so check next element
}
}
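The loop above compares version components most-significant-first and stops at the first decisive component. Extracted as a standalone sketch:

public class VersionCheck {

    /** Returns true if found >= min, comparing components left to right. */
    public static boolean atLeast(int[] found, int[] min) {
        for (int i = 0; i < min.length; i++) {
            if (found[i] > min[i]) {
                return true; // a more significant component already exceeds the minimum
            }
            if (found[i] < min[i]) {
                return false;
            }
            // equal, so fall through to the next component
        }
        return true; // all components equal
    }
}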

View File

@ -78,13 +78,16 @@ public abstract class VagrantShellTask extends DefaultTask {
script.add("cd " + convertWindowsPath(getProject(), rootDir));
extension.getVmEnv().forEach((k, v) -> script.add("$Env:" + k + " = \"" + v + "\""));
script.addAll(getWindowsScript().stream().map(s -> " " + s).collect(Collectors.toList()));
script.addAll(Arrays.asList(
" exit $LASTEXITCODE",
"} catch {",
// catch if we have a failure to even run the script at all above, equivalent to set -e, sort of
" echo $_.Exception.Message",
" exit 1",
"}"));
script.addAll(
Arrays.asList(
" exit $LASTEXITCODE",
"} catch {",
// catch if we have a failure to even run the script at all above, equivalent to set -e, sort of
" echo $_.Exception.Message",
" exit 1",
"}"
)
);
spec.setArgs("--elevated", "--command", String.join("\n", script));
spec.setProgressHandler(progressHandler);
});
@ -105,5 +108,4 @@ public abstract class VagrantShellTask extends DefaultTask {
}
}
}

View File

@ -45,11 +45,7 @@ public class JdkJarHellCheck {
String entry = root.relativize(file).toString().replace('\\', '/');
if (entry.endsWith(".class") && entry.endsWith("module-info.class") == false) {
if (ext.getResource(entry) != null) {
detected.add(
entry
.replace("/", ".")
.replace(".class","")
);
detected.add(entry.replace("/", ".").replace(".class", ""));
}
}
return FileVisitResult.CONTINUE;

View File

@ -37,11 +37,13 @@ class LazyFileOutputStream extends OutputStream {
file.getParentFile().mkdirs();
delegate = new FileOutputStream(file);
}
@Override
public void write(int b) throws IOException {
bootstrap();
delegate.write(b);
}
@Override
public void write(byte b[], int off, int len) throws IOException {
bootstrap();

View File

@ -103,9 +103,9 @@ public class LoggedExec extends Exec {
private static final Pattern NEWLINE = Pattern.compile(System.lineSeparator());
private static <T extends BaseExecSpec> ExecResult genericExec(
Project project,
Function<Action<T>,ExecResult> function,
Function<Action<T>, ExecResult> function,
Action<T> action
) {
if (project.getLogger().isInfoEnabled()) {

View File

@ -29,11 +29,9 @@ public final class Version implements Comparable<Version> {
RELAXED
}
private static final Pattern pattern =
Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?");
private static final Pattern pattern = Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?");
private static final Pattern relaxedPattern =
Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-[a-zA-Z0-9_]+)*?");
private static final Pattern relaxedPattern = Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-[a-zA-Z0-9_]+)*?");
public Version(int major, int minor, int revision) {
Objects.requireNonNull(major, "major version can't be null");
@ -65,16 +63,10 @@ public final class Version implements Comparable<Version> {
String expected = mode == Mode.STRICT == true
? "major.minor.revision[-(alpha|beta|rc)Number][-SNAPSHOT]"
: "major.minor.revision[-extra]";
throw new IllegalArgumentException(
"Invalid version format: '" + s + "'. Should be " + expected
);
throw new IllegalArgumentException("Invalid version format: '" + s + "'. Should be " + expected);
}
return new Version(
Integer.parseInt(matcher.group(1)),
parseSuffixNumber(matcher.group(2)),
parseSuffixNumber(matcher.group(3))
);
return new Version(Integer.parseInt(matcher.group(1)), parseSuffixNumber(matcher.group(2)), parseSuffixNumber(matcher.group(3)));
}
@Override
@ -116,12 +108,14 @@ public final class Version implements Comparable<Version> {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Version version = (Version) o;
return major == version.major &&
minor == version.minor &&
revision == version.revision;
return major == version.major && minor == version.minor && revision == version.revision;
}
@Override
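For reference, the strict pattern above accepts inputs such as "7.1.0", "7.0.0-alpha1" or "8.0.0-SNAPSHOT". A minimal parse sketch reusing that pattern verbatim (the demo class is not part of this commit):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class VersionParseDemo {

    private static final Pattern STRICT = Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?");

    public static void main(String[] args) {
        Matcher matcher = STRICT.matcher("7.1.0-SNAPSHOT");
        if (matcher.matches()) {
            // groups 1-3 carry major, minor and revision
            System.out.println(matcher.group(1) + "." + matcher.group(2) + "." + matcher.group(3)); // prints 7.1.0
        }
    }
}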

View File

@ -15,7 +15,7 @@ public class VersionProperties {
return elasticsearch;
}
public static Version getElasticsearchVersion() {
return Version.fromString(elasticsearch);
}

View File

@ -124,8 +124,8 @@ public class BuildParams {
String message = "Build parameter '" + propertyName(callingMethod) + "' has not been initialized. ";
if (executionTime) {
message += "This property is initialized at execution time, " +
"please ensure you are not attempting to access it during project configuration.";
message += "This property is initialized at execution time, "
+ "please ensure you are not attempting to access it during project configuration.";
} else {
message += "Perhaps the plugin responsible for initializing this property has not been applied.";
}
@ -144,24 +144,22 @@ public class BuildParams {
public static class MutableBuildParams {
private static MutableBuildParams INSTANCE = new MutableBuildParams();
private MutableBuildParams() { }
private MutableBuildParams() {}
/**
* Resets any existing values from previous initializations.
*/
public void reset() {
Arrays.stream(BuildParams.class.getDeclaredFields())
.filter(f -> Modifier.isStatic(f.getModifiers()))
.forEach(f -> {
try {
// Since we are mutating private static fields from a public static inner class we need to suppress
// accessibility controls here.
f.setAccessible(true);
f.set(null, null);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
});
Arrays.stream(BuildParams.class.getDeclaredFields()).filter(f -> Modifier.isStatic(f.getModifiers())).forEach(f -> {
try {
// Since we are mutating private static fields from a public static inner class we need to suppress
// accessibility controls here.
f.setAccessible(true);
f.set(null, null);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
});
}
public void setCompilerJavaHome(File compilerJavaHome) {
@ -233,8 +231,9 @@ public class BuildParams {
* Indicates that a build parameter is initialized at task execution time and is not available at project configuration time.
* Attempts to read an uninitialized parameter will result in an {@link IllegalStateException}.
*/
@Target({ElementType.METHOD, ElementType.FIELD})
@Target({ ElementType.METHOD, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface ExecutionTime {}
public @interface ExecutionTime {
}
}
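The reset() loop above nulls every static field of BuildParams reflectively. In isolation (sketch; this assumes all static fields are reference types, since a primitive field cannot be set to null):

import java.lang.reflect.Modifier;
import java.util.Arrays;

public class StaticReset {

    /** Nulls all static fields of the given class, bypassing access checks. */
    public static void reset(Class<?> clazz) {
        Arrays.stream(clazz.getDeclaredFields()).filter(f -> Modifier.isStatic(f.getModifiers())).forEach(f -> {
            try {
                f.setAccessible(true); // needed for private fields
                f.set(null, null);     // static fields take a null receiver
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        });
    }
}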

View File

@ -42,17 +42,13 @@ public class BuildPluginIT extends GradleIntegrationTestCase {
public TemporaryFolder tmpDir = new TemporaryFolder();
public void testPluginCanBeApplied() {
BuildResult result = getGradleRunner("elasticsearch.build")
.withArguments("hello", "-s")
.build();
BuildResult result = getGradleRunner("elasticsearch.build").withArguments("hello", "-s").build();
assertTaskSuccessful(result, ":hello");
assertOutputContains("build plugin can be applied");
}
public void testCheckTask() {
BuildResult result = getGradleRunner("elasticsearch.build")
.withArguments("check", "assemble", "-s")
.build();
BuildResult result = getGradleRunner("elasticsearch.build").withArguments("check", "assemble", "-s").build();
assertTaskSuccessful(result, ":check");
}
@ -64,9 +60,10 @@ public class BuildPluginIT extends GradleIntegrationTestCase {
"repositories {",
" maven {",
" name \"elastic-maven\"",
" url \"" + url + "\"\n",
" url \"" + url + "\"\n",
" }",
"}");
"}"
);
runInsecureArtifactRepositoryTest(name, url, lines);
}
@ -78,17 +75,17 @@ public class BuildPluginIT extends GradleIntegrationTestCase {
"repositories {",
" ivy {",
" name \"elastic-ivy\"",
" url \"" + url + "\"\n",
" url \"" + url + "\"\n",
" }",
"}");
"}"
);
runInsecureArtifactRepositoryTest(name, url, lines);
}
private void runInsecureArtifactRepositoryTest(final String name, final String url, final List<String> lines) throws IOException {
final File projectDir = getProjectDir("elasticsearch.build");
FileUtils.copyDirectory(projectDir, tmpDir.getRoot(), pathname -> pathname.getPath().contains("/build/") == false);
final List<String> buildGradleLines =
Files.readAllLines(tmpDir.getRoot().toPath().resolve("build.gradle"), StandardCharsets.UTF_8);
final List<String> buildGradleLines = Files.readAllLines(tmpDir.getRoot().toPath().resolve("build.gradle"), StandardCharsets.UTF_8);
buildGradleLines.addAll(lines);
Files.write(tmpDir.getRoot().toPath().resolve("build.gradle"), buildGradleLines, StandardCharsets.UTF_8);
final BuildResult result = GradleRunner.create()
@ -98,34 +95,27 @@ public class BuildPluginIT extends GradleIntegrationTestCase {
.buildAndFail();
assertOutputContains(
result.getOutput(),
"repository [" + name + "] on project with path [:] is not using a secure protocol for artifacts on [" + url + "]");
"repository [" + name + "] on project with path [:] is not using a secure protocol for artifacts on [" + url + "]"
);
}
public void testLicenseAndNotice() throws IOException {
BuildResult result = getGradleRunner("elasticsearch.build")
.withArguments("clean", "assemble")
.build();
BuildResult result = getGradleRunner("elasticsearch.build").withArguments("clean", "assemble").build();
assertTaskSuccessful(result, ":assemble");
assertBuildFileExists(result, "elasticsearch.build", "distributions/elasticsearch.build.jar");
try (ZipFile zipFile = new ZipFile(new File(
getBuildDir("elasticsearch.build"), "distributions/elasticsearch.build.jar"
))) {
try (ZipFile zipFile = new ZipFile(new File(getBuildDir("elasticsearch.build"), "distributions/elasticsearch.build.jar"))) {
ZipEntry licenseEntry = zipFile.getEntry("META-INF/LICENSE.txt");
ZipEntry noticeEntry = zipFile.getEntry("META-INF/NOTICE.txt");
assertNotNull("Jar does not have META-INF/LICENSE.txt", licenseEntry);
assertNotNull("Jar does not have META-INF/NOTICE.txt", noticeEntry);
try (
InputStream license = zipFile.getInputStream(licenseEntry);
InputStream notice = zipFile.getInputStream(noticeEntry)
) {
try (InputStream license = zipFile.getInputStream(licenseEntry); InputStream notice = zipFile.getInputStream(noticeEntry)) {
assertEquals("this is a test license file", IOUtils.toString(license, StandardCharsets.UTF_8.name()));
assertEquals("this is a test notice file", IOUtils.toString(notice, StandardCharsets.UTF_8.name()));
}
}
}
}

View File

@ -25,7 +25,6 @@ import org.junit.Test;
import java.net.URI;
import java.net.URISyntaxException;
public class BuildPluginTests extends GradleUnitTestCase {
@Test(expected = GradleException.class)

View File

@ -42,48 +42,252 @@ public class BwcVersionsTests extends GradleUnitTestCase {
static {
// unreleased major and two unreleased minors ( minor in feature freeze )
sampleVersions.put("8.0.0", asList(
"7_0_0", "7_0_1", "7_1_0", "7_1_1", "7_2_0", "7_3_0", "8.0.0"
));
sampleVersions.put("7.0.0-alpha1", asList(
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2",
"6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4",
"6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4",
"6_3_0", "6_3_1", "6_3_2",
"6_4_0", "6_4_1", "6_4_2",
"6_5_0", "7_0_0_alpha1"
));
sampleVersions.put("6.5.0", asList(
"5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1",
"5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", "5_3_1", "5_3_2", "5_3_3",
"5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", "5_6_0", "5_6_1", "5_6_2", "5_6_3",
"5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", "5_6_11", "5_6_12", "5_6_13",
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", "6_0_0", "6_0_1",
"6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", "6_3_0", "6_3_1",
"6_3_2", "6_4_0", "6_4_1", "6_4_2", "6_5_0"
));
sampleVersions.put("6.6.0", asList(
"5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1",
"5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", "5_3_1", "5_3_2", "5_3_3",
"5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", "5_6_0", "5_6_1", "5_6_2", "5_6_3",
"5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", "5_6_11", "5_6_12", "5_6_13",
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", "6_0_0", "6_0_1",
"6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", "6_3_0", "6_3_1",
"6_3_2", "6_4_0", "6_4_1", "6_4_2", "6_5_0", "6_6_0"
));
sampleVersions.put("6.4.2", asList(
"5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1",
"5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0",
"5_3_1", "5_3_2", "5_3_3", "5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3",
"5_6_0", "5_6_1", "5_6_2", "5_6_3", "5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10",
"5_6_11", "5_6_12", "5_6_13",
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2",
"6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3",
"6_2_4", "6_3_0", "6_3_1", "6_3_2", "6_4_0", "6_4_1", "6_4_2"
));
sampleVersions.put("7.1.0", asList(
"7_1_0", "7_0_0", "6_7_0", "6_6_1", "6_6_0"
));
sampleVersions.put("8.0.0", asList("7_0_0", "7_0_1", "7_1_0", "7_1_1", "7_2_0", "7_3_0", "8.0.0"));
sampleVersions.put(
"7.0.0-alpha1",
asList(
"6_0_0_alpha1",
"6_0_0_alpha2",
"6_0_0_beta1",
"6_0_0_beta2",
"6_0_0_rc1",
"6_0_0_rc2",
"6_0_0",
"6_0_1",
"6_1_0",
"6_1_1",
"6_1_2",
"6_1_3",
"6_1_4",
"6_2_0",
"6_2_1",
"6_2_2",
"6_2_3",
"6_2_4",
"6_3_0",
"6_3_1",
"6_3_2",
"6_4_0",
"6_4_1",
"6_4_2",
"6_5_0",
"7_0_0_alpha1"
)
);
sampleVersions.put(
"6.5.0",
asList(
"5_0_0_alpha1",
"5_0_0_alpha2",
"5_0_0_alpha3",
"5_0_0_alpha4",
"5_0_0_alpha5",
"5_0_0_beta1",
"5_0_0_rc1",
"5_0_0",
"5_0_1",
"5_0_2",
"5_1_1",
"5_1_2",
"5_2_0",
"5_2_1",
"5_2_2",
"5_3_0",
"5_3_1",
"5_3_2",
"5_3_3",
"5_4_0",
"5_4_1",
"5_4_2",
"5_4_3",
"5_5_0",
"5_5_1",
"5_5_2",
"5_5_3",
"5_6_0",
"5_6_1",
"5_6_2",
"5_6_3",
"5_6_4",
"5_6_5",
"5_6_6",
"5_6_7",
"5_6_8",
"5_6_9",
"5_6_10",
"5_6_11",
"5_6_12",
"5_6_13",
"6_0_0_alpha1",
"6_0_0_alpha2",
"6_0_0_beta1",
"6_0_0_beta2",
"6_0_0_rc1",
"6_0_0_rc2",
"6_0_0",
"6_0_1",
"6_1_0",
"6_1_1",
"6_1_2",
"6_1_3",
"6_1_4",
"6_2_0",
"6_2_1",
"6_2_2",
"6_2_3",
"6_2_4",
"6_3_0",
"6_3_1",
"6_3_2",
"6_4_0",
"6_4_1",
"6_4_2",
"6_5_0"
)
);
sampleVersions.put(
"6.6.0",
asList(
"5_0_0_alpha1",
"5_0_0_alpha2",
"5_0_0_alpha3",
"5_0_0_alpha4",
"5_0_0_alpha5",
"5_0_0_beta1",
"5_0_0_rc1",
"5_0_0",
"5_0_1",
"5_0_2",
"5_1_1",
"5_1_2",
"5_2_0",
"5_2_1",
"5_2_2",
"5_3_0",
"5_3_1",
"5_3_2",
"5_3_3",
"5_4_0",
"5_4_1",
"5_4_2",
"5_4_3",
"5_5_0",
"5_5_1",
"5_5_2",
"5_5_3",
"5_6_0",
"5_6_1",
"5_6_2",
"5_6_3",
"5_6_4",
"5_6_5",
"5_6_6",
"5_6_7",
"5_6_8",
"5_6_9",
"5_6_10",
"5_6_11",
"5_6_12",
"5_6_13",
"6_0_0_alpha1",
"6_0_0_alpha2",
"6_0_0_beta1",
"6_0_0_beta2",
"6_0_0_rc1",
"6_0_0_rc2",
"6_0_0",
"6_0_1",
"6_1_0",
"6_1_1",
"6_1_2",
"6_1_3",
"6_1_4",
"6_2_0",
"6_2_1",
"6_2_2",
"6_2_3",
"6_2_4",
"6_3_0",
"6_3_1",
"6_3_2",
"6_4_0",
"6_4_1",
"6_4_2",
"6_5_0",
"6_6_0"
)
);
sampleVersions.put(
"6.4.2",
asList(
"5_0_0_alpha1",
"5_0_0_alpha2",
"5_0_0_alpha3",
"5_0_0_alpha4",
"5_0_0_alpha5",
"5_0_0_beta1",
"5_0_0_rc1",
"5_0_0",
"5_0_1",
"5_0_2",
"5_1_1",
"5_1_2",
"5_2_0",
"5_2_1",
"5_2_2",
"5_3_0",
"5_3_1",
"5_3_2",
"5_3_3",
"5_4_0",
"5_4_1",
"5_4_2",
"5_4_3",
"5_5_0",
"5_5_1",
"5_5_2",
"5_5_3",
"5_6_0",
"5_6_1",
"5_6_2",
"5_6_3",
"5_6_4",
"5_6_5",
"5_6_6",
"5_6_7",
"5_6_8",
"5_6_9",
"5_6_10",
"5_6_11",
"5_6_12",
"5_6_13",
"6_0_0_alpha1",
"6_0_0_alpha2",
"6_0_0_beta1",
"6_0_0_beta2",
"6_0_0_rc1",
"6_0_0_rc2",
"6_0_0",
"6_0_1",
"6_1_0",
"6_1_1",
"6_1_2",
"6_1_3",
"6_1_4",
"6_2_0",
"6_2_1",
"6_2_2",
"6_2_3",
"6_2_4",
"6_3_0",
"6_3_1",
"6_3_2",
"6_4_0",
"6_4_1",
"6_4_2"
)
);
sampleVersions.put("7.1.0", asList("7_1_0", "7_0_0", "6_7_0", "6_6_1", "6_6_0"));
}
@Test(expected = IllegalArgumentException.class)
@ -99,131 +303,340 @@ public class BwcVersionsTests extends GradleUnitTestCase {
@Test(expected = IllegalStateException.class)
public void testExceptionOnTooManyMajors() {
new BwcVersions(
asList(
formatVersionToLine("5.6.12"),
formatVersionToLine("6.5.0"),
formatVersionToLine("7.0.0")
),
asList(formatVersionToLine("5.6.12"), formatVersionToLine("6.5.0"), formatVersionToLine("7.0.0")),
Version.fromString("6.5.0")
);
}
public void testWireCompatible() {
assertVersionsEquals(
asList("6.5.0"),
getVersionCollection("7.0.0-alpha1").getWireCompatible()
);
assertVersionsEquals(asList("6.5.0"), getVersionCollection("7.0.0-alpha1").getWireCompatible());
assertVersionsEquals(
asList(
"5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10",
"5.6.11", "5.6.12", "5.6.13",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2"
"5.6.0",
"5.6.1",
"5.6.2",
"5.6.3",
"5.6.4",
"5.6.5",
"5.6.6",
"5.6.7",
"5.6.8",
"5.6.9",
"5.6.10",
"5.6.11",
"5.6.12",
"5.6.13",
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1",
"6.4.2"
),
getVersionCollection("6.5.0").getWireCompatible()
);
assertVersionsEquals(
asList(
"5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10",
"5.6.11", "5.6.12", "5.6.13", "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1"
"5.6.0",
"5.6.1",
"5.6.2",
"5.6.3",
"5.6.4",
"5.6.5",
"5.6.6",
"5.6.7",
"5.6.8",
"5.6.9",
"5.6.10",
"5.6.11",
"5.6.12",
"5.6.13",
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1"
),
getVersionCollection("6.4.2").getWireCompatible()
);
assertVersionsEquals(
asList(
"5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10",
"5.6.11", "5.6.12", "5.6.13",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2", "6.5.0"
"5.6.0",
"5.6.1",
"5.6.2",
"5.6.3",
"5.6.4",
"5.6.5",
"5.6.6",
"5.6.7",
"5.6.8",
"5.6.9",
"5.6.10",
"5.6.11",
"5.6.12",
"5.6.13",
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1",
"6.4.2",
"6.5.0"
),
getVersionCollection("6.6.0").getWireCompatible()
);
assertVersionsEquals(
asList("7.3.0"),
getVersionCollection("8.0.0").getWireCompatible()
);
assertVersionsEquals(
asList("6.7.0", "7.0.0"),
getVersionCollection("7.1.0").getWireCompatible()
);
assertVersionsEquals(asList("7.3.0"), getVersionCollection("8.0.0").getWireCompatible());
assertVersionsEquals(asList("6.7.0", "7.0.0"), getVersionCollection("7.1.0").getWireCompatible());
}
public void testWireCompatibleUnreleased() {
assertVersionsEquals(
asList("6.5.0"),
getVersionCollection("7.0.0-alpha1").getUnreleasedWireCompatible()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2"),
getVersionCollection("6.5.0").getUnreleasedWireCompatible()
);
assertVersionsEquals(asList("6.5.0"), getVersionCollection("7.0.0-alpha1").getUnreleasedWireCompatible());
assertVersionsEquals(asList("5.6.13", "6.4.2"), getVersionCollection("6.5.0").getUnreleasedWireCompatible());
assertVersionsEquals(
asList("5.6.13"),
getVersionCollection("6.4.2").getUnreleasedWireCompatible()
);
assertVersionsEquals(asList("5.6.13"), getVersionCollection("6.4.2").getUnreleasedWireCompatible());
assertVersionsEquals(
asList("5.6.13", "6.4.2", "6.5.0"),
getVersionCollection("6.6.0").getUnreleasedWireCompatible()
);
assertVersionsEquals(asList("5.6.13", "6.4.2", "6.5.0"), getVersionCollection("6.6.0").getUnreleasedWireCompatible());
assertVersionsEquals(
asList("7.3.0"),
getVersionCollection("8.0.0").getUnreleasedWireCompatible()
);
assertVersionsEquals(
asList("6.7.0", "7.0.0"),
getVersionCollection("7.1.0").getWireCompatible()
);
assertVersionsEquals(asList("7.3.0"), getVersionCollection("8.0.0").getUnreleasedWireCompatible());
assertVersionsEquals(asList("6.7.0", "7.0.0"), getVersionCollection("7.1.0").getWireCompatible());
}
public void testIndexCompatible() {
assertVersionsEquals(
asList(
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", "6.3.0", "6.3.1",
"6.3.2", "6.4.0", "6.4.1", "6.4.2", "6.5.0"
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1",
"6.4.2",
"6.5.0"
),
getVersionCollection("7.0.0-alpha1").getIndexCompatible()
);
assertVersionsEquals(
asList(
"5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3",
"5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3",
"5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2"
"5.0.0",
"5.0.1",
"5.0.2",
"5.1.1",
"5.1.2",
"5.2.0",
"5.2.1",
"5.2.2",
"5.3.0",
"5.3.1",
"5.3.2",
"5.3.3",
"5.4.0",
"5.4.1",
"5.4.2",
"5.4.3",
"5.5.0",
"5.5.1",
"5.5.2",
"5.5.3",
"5.6.0",
"5.6.1",
"5.6.2",
"5.6.3",
"5.6.4",
"5.6.5",
"5.6.6",
"5.6.7",
"5.6.8",
"5.6.9",
"5.6.10",
"5.6.11",
"5.6.12",
"5.6.13",
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1",
"6.4.2"
),
getVersionCollection("6.5.0").getIndexCompatible()
);
assertVersionsEquals(
asList(
"5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3",
"5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3",
"5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1"
"5.0.0",
"5.0.1",
"5.0.2",
"5.1.1",
"5.1.2",
"5.2.0",
"5.2.1",
"5.2.2",
"5.3.0",
"5.3.1",
"5.3.2",
"5.3.3",
"5.4.0",
"5.4.1",
"5.4.2",
"5.4.3",
"5.5.0",
"5.5.1",
"5.5.2",
"5.5.3",
"5.6.0",
"5.6.1",
"5.6.2",
"5.6.3",
"5.6.4",
"5.6.5",
"5.6.6",
"5.6.7",
"5.6.8",
"5.6.9",
"5.6.10",
"5.6.11",
"5.6.12",
"5.6.13",
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1"
),
getVersionCollection("6.4.2").getIndexCompatible()
);
assertVersionsEquals(
asList(
"5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3",
"5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3",
"5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2", "6.5.0"
"5.0.0",
"5.0.1",
"5.0.2",
"5.1.1",
"5.1.2",
"5.2.0",
"5.2.1",
"5.2.2",
"5.3.0",
"5.3.1",
"5.3.2",
"5.3.3",
"5.4.0",
"5.4.1",
"5.4.2",
"5.4.3",
"5.5.0",
"5.5.1",
"5.5.2",
"5.5.3",
"5.6.0",
"5.6.1",
"5.6.2",
"5.6.3",
"5.6.4",
"5.6.5",
"5.6.6",
"5.6.7",
"5.6.8",
"5.6.9",
"5.6.10",
"5.6.11",
"5.6.12",
"5.6.13",
"6.0.0",
"6.0.1",
"6.1.0",
"6.1.1",
"6.1.2",
"6.1.3",
"6.1.4",
"6.2.0",
"6.2.1",
"6.2.2",
"6.2.3",
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"6.4.0",
"6.4.1",
"6.4.2",
"6.5.0"
),
getVersionCollection("6.6.0").getIndexCompatible()
);
@ -235,84 +648,33 @@ public class BwcVersionsTests extends GradleUnitTestCase {
}
public void testIndexCompatibleUnreleased() {
assertVersionsEquals(
asList("6.4.2", "6.5.0"),
getVersionCollection("7.0.0-alpha1").getUnreleasedIndexCompatible()
);
assertVersionsEquals(asList("6.4.2", "6.5.0"), getVersionCollection("7.0.0-alpha1").getUnreleasedIndexCompatible());
assertVersionsEquals(
asList("5.6.13", "6.4.2"),
getVersionCollection("6.5.0").getUnreleasedIndexCompatible()
);
assertVersionsEquals(asList("5.6.13", "6.4.2"), getVersionCollection("6.5.0").getUnreleasedIndexCompatible());
assertVersionsEquals(
asList("5.6.13"),
getVersionCollection("6.4.2").getUnreleasedIndexCompatible()
);
assertVersionsEquals(asList("5.6.13"), getVersionCollection("6.4.2").getUnreleasedIndexCompatible());
assertVersionsEquals(
asList("5.6.13", "6.4.2", "6.5.0"),
getVersionCollection("6.6.0").getUnreleasedIndexCompatible()
);
assertVersionsEquals(asList("5.6.13", "6.4.2", "6.5.0"), getVersionCollection("6.6.0").getUnreleasedIndexCompatible());
assertVersionsEquals(
asList("7.1.1", "7.2.0", "7.3.0"),
getVersionCollection("8.0.0").getUnreleasedIndexCompatible()
);
assertVersionsEquals(asList("7.1.1", "7.2.0", "7.3.0"), getVersionCollection("8.0.0").getUnreleasedIndexCompatible());
}
public void testGetUnreleased() {
assertVersionsEquals(
asList("6.4.2", "6.5.0", "7.0.0-alpha1"),
getVersionCollection("7.0.0-alpha1").getUnreleased()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2", "6.5.0"),
getVersionCollection("6.5.0").getUnreleased()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2"),
getVersionCollection("6.4.2").getUnreleased()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2", "6.5.0", "6.6.0"),
getVersionCollection("6.6.0").getUnreleased()
);
assertVersionsEquals(
asList("7.1.1", "7.2.0", "7.3.0", "8.0.0"),
getVersionCollection("8.0.0").getUnreleased()
);
assertVersionsEquals(
asList("6.6.1", "6.7.0", "7.0.0", "7.1.0"),
getVersionCollection("7.1.0").getUnreleased()
);
assertVersionsEquals(asList("6.4.2", "6.5.0", "7.0.0-alpha1"), getVersionCollection("7.0.0-alpha1").getUnreleased());
assertVersionsEquals(asList("5.6.13", "6.4.2", "6.5.0"), getVersionCollection("6.5.0").getUnreleased());
assertVersionsEquals(asList("5.6.13", "6.4.2"), getVersionCollection("6.4.2").getUnreleased());
assertVersionsEquals(asList("5.6.13", "6.4.2", "6.5.0", "6.6.0"), getVersionCollection("6.6.0").getUnreleased());
assertVersionsEquals(asList("7.1.1", "7.2.0", "7.3.0", "8.0.0"), getVersionCollection("8.0.0").getUnreleased());
assertVersionsEquals(asList("6.6.1", "6.7.0", "7.0.0", "7.1.0"), getVersionCollection("7.1.0").getUnreleased());
}
public void testGetBranch() {
assertUnreleasedBranchNames(
asList("6.4", "6.x"),
getVersionCollection("7.0.0-alpha1")
);
assertUnreleasedBranchNames(
asList("5.6", "6.4"),
getVersionCollection("6.5.0")
);
assertUnreleasedBranchNames(
singletonList("5.6"),
getVersionCollection("6.4.2")
);
assertUnreleasedBranchNames(
asList("5.6", "6.4", "6.5"),
getVersionCollection("6.6.0")
);
assertUnreleasedBranchNames(
asList("7.1", "7.2", "7.x"),
getVersionCollection("8.0.0")
);
assertUnreleasedBranchNames(
asList("6.6", "6.7", "7.0"),
getVersionCollection("7.1.0")
);
assertUnreleasedBranchNames(asList("6.4", "6.x"), getVersionCollection("7.0.0-alpha1"));
assertUnreleasedBranchNames(asList("5.6", "6.4"), getVersionCollection("6.5.0"));
assertUnreleasedBranchNames(singletonList("5.6"), getVersionCollection("6.4.2"));
assertUnreleasedBranchNames(asList("5.6", "6.4", "6.5"), getVersionCollection("6.6.0"));
assertUnreleasedBranchNames(asList("7.1", "7.2", "7.x"), getVersionCollection("8.0.0"));
assertUnreleasedBranchNames(asList("6.6", "6.7", "7.0"), getVersionCollection("7.1.0"));
}
public void testGetGradleProjectPath() {
@ -324,10 +686,7 @@ public class BwcVersionsTests extends GradleUnitTestCase {
asList(":distribution:bwc:maintenance", ":distribution:bwc:bugfix"),
getVersionCollection("6.5.0")
);
assertUnreleasedGradleProjectPaths(
singletonList(":distribution:bwc:maintenance"),
getVersionCollection("6.4.2")
);
assertUnreleasedGradleProjectPaths(singletonList(":distribution:bwc:maintenance"), getVersionCollection("6.4.2"));
assertUnreleasedGradleProjectPaths(
asList(":distribution:bwc:maintenance", ":distribution:bwc:bugfix", ":distribution:bwc:minor"),
getVersionCollection("6.6.0")
@ -345,13 +704,11 @@ public class BwcVersionsTests extends GradleUnitTestCase {
public void testCompareToAuthoritative() {
List<String> listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0");
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1", "7.1.0")
.map(Version::fromString)
.collect(Collectors.toList());
.map(Version::fromString)
.collect(Collectors.toList());
BwcVersions vc = new BwcVersions(
listOfVersions.stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
listOfVersions.stream().map(this::formatVersionToLine).collect(Collectors.toList()),
Version.fromString("8.0.0")
);
vc.compareToAuthoritative(authoritativeReleasedVersions);
@ -360,13 +717,11 @@ public class BwcVersionsTests extends GradleUnitTestCase {
public void testCompareToAuthoritativeUnreleasedActuallyReleased() {
List<String> listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0");
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1", "7.1.0", "7.1.1", "8.0.0")
.map(Version::fromString)
.collect(Collectors.toList());
.map(Version::fromString)
.collect(Collectors.toList());
BwcVersions vc = new BwcVersions(
listOfVersions.stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
listOfVersions.stream().map(this::formatVersionToLine).collect(Collectors.toList()),
Version.fromString("8.0.0")
);
expectedEx.expect(IllegalStateException.class);
@ -376,13 +731,9 @@ public class BwcVersionsTests extends GradleUnitTestCase {
public void testCompareToAuthoritativeNotReallyReleased() {
List<String> listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0");
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1")
.map(Version::fromString)
.collect(Collectors.toList());
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1").map(Version::fromString).collect(Collectors.toList());
BwcVersions vc = new BwcVersions(
listOfVersions.stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
listOfVersions.stream().map(this::formatVersionToLine).collect(Collectors.toList()),
Version.fromString("8.0.0")
);
expectedEx.expect(IllegalStateException.class);
@ -392,17 +743,13 @@ public class BwcVersionsTests extends GradleUnitTestCase {
private void assertUnreleasedGradleProjectPaths(List<String> expectedNames, BwcVersions bwcVersions) {
List<String> actualNames = new ArrayList<>();
bwcVersions.forPreviousUnreleased(unreleasedVersion ->
actualNames.add(unreleasedVersion.gradleProjectPath)
);
bwcVersions.forPreviousUnreleased(unreleasedVersion -> actualNames.add(unreleasedVersion.gradleProjectPath));
assertEquals(expectedNames, actualNames);
}
private void assertUnreleasedBranchNames(List<String> expectedBranches, BwcVersions bwcVersions) {
List<String> actualBranches = new ArrayList<>();
bwcVersions.forPreviousUnreleased(unreleasedVersionInfo ->
actualBranches.add(unreleasedVersionInfo.branch)
);
bwcVersions.forPreviousUnreleased(unreleasedVersionInfo -> actualBranches.add(unreleasedVersionInfo.branch));
assertEquals(expectedBranches, actualBranches);
}
@ -411,19 +758,12 @@ public class BwcVersionsTests extends GradleUnitTestCase {
}
private void assertVersionsEquals(List<String> expected, List<Version> actual) {
assertEquals(
expected.stream()
.map(Version::fromString)
.collect(Collectors.toList()),
actual
);
assertEquals(expected.stream().map(Version::fromString).collect(Collectors.toList()), actual);
}
private BwcVersions getVersionCollection(String currentVersion) {
return new BwcVersions(
sampleVersions.get(currentVersion).stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
sampleVersions.get(currentVersion).stream().map(this::formatVersionToLine).collect(Collectors.toList()),
Version.fromString(currentVersion)
);
}
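For context, the formatVersionToLine helper that these tests call repeatedly sits outside this hunk. A minimal sketch of what it presumably does — rendering a plain version string as the kind of Version declaration line that BwcVersions knows how to parse — is shown below; this is an assumption for illustration, not the method's actual body.

// Hypothetical sketch: turn "7.1.0" into a source line resembling
// "    public static final Version V_7_1_0 " so that the version-line
// pattern can extract the major/minor/revision groups.
private String formatVersionToLine(final String version) {
    return "    public static final Version V_" + version.replaceAll("\\.", "_") + " ";
}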

View File

@ -66,17 +66,14 @@ public class ConcatFilesTaskTests extends GradleUnitTestCase {
file2.getParentFile().mkdirs();
file1.createNewFile();
file2.createNewFile();
Files.write(file1.toPath(), ("Hello" + System.lineSeparator() + "Hello").getBytes(StandardCharsets.UTF_8));
Files.write(file2.toPath(), ("Hello" + System.lineSeparator() + "नमस्ते").getBytes(StandardCharsets.UTF_8));
concatFilesTask.setFiles(project.fileTree(file1.getParentFile().getParentFile()));
concatFilesTask.concatFiles();
assertEquals(
Arrays.asList("Hello", "नमस्ते"),
Files.readAllLines(concatFilesTask.getTarget().toPath(), StandardCharsets.UTF_8)
);
assertEquals(Arrays.asList("Hello", "नमस्ते"), Files.readAllLines(concatFilesTask.getTarget().toPath(), StandardCharsets.UTF_8));
}
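Note what the assertion above implies: "Hello" occurs three times across the two input files but only once in the expected output, so the task evidently de-duplicates lines while concatenating. A rough sketch of that behaviour, purely as an assumption about the semantics and not the task's actual implementation:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Concatenate the inputs line by line, keeping only the first occurrence of
// each line; a LinkedHashSet preserves first-seen order.
static void concatUnique(List<Path> inputs, Path target) throws IOException {
    Set<String> unique = new LinkedHashSet<>();
    for (Path input : inputs) {
        unique.addAll(Files.readAllLines(input, StandardCharsets.UTF_8));
    }
    Files.write(target, unique, StandardCharsets.UTF_8);
}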

View File

@ -44,49 +44,100 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
public void testCurrent() throws Exception {
String projectName = ":distribution:archives:linux-tar";
assertExtractedDistro(VersionProperties.getElasticsearch(), "archive", "linux", null, null,
"tests.local_distro.config", "default",
"tests.local_distro.project", projectName);
assertExtractedDistro(
VersionProperties.getElasticsearch(),
"archive",
"linux",
null,
null,
"tests.local_distro.config",
"default",
"tests.local_distro.project",
projectName
);
}
public void testCurrentExternal() throws Exception {
checkService(VersionProperties.getElasticsearch(), "archive", "linux", null, null,
checkService(
VersionProperties.getElasticsearch(),
"archive",
"linux",
null,
null,
"/downloads/elasticsearch/elasticsearch-" + VersionProperties.getElasticsearch() + "-linux-x86_64.tar.gz",
"tests.internal", "false");
"tests.internal",
"false"
);
}
public void testBwc() throws Exception {
assertExtractedDistro("8.1.0", "archive", "linux", null, null,
"tests.local_distro.config", "linux-tar",
"tests.local_distro.project", ":distribution:bwc:minor",
"tests.current_version", "8.0.0");
assertExtractedDistro(
"8.1.0",
"archive",
"linux",
null,
null,
"tests.local_distro.config",
"linux-tar",
"tests.local_distro.project",
":distribution:bwc:minor",
"tests.current_version",
"8.0.0"
);
}
public void testBwcExternal() throws Exception {
checkService("8.1.0-SNAPSHOT", "archive", "linux", null, null,
checkService(
"8.1.0-SNAPSHOT",
"archive",
"linux",
null,
null,
"/downloads/elasticsearch/elasticsearch-8.1.0-SNAPSHOT-linux-x86_64.tar.gz",
"tests.internal", "false",
"tests.current_version", "9.0.0");
"tests.internal",
"false",
"tests.current_version",
"9.0.0"
);
}
public void testReleased() throws Exception {
checkService("7.0.0", "archive", "windows", null, null,
"/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip");
checkService("6.5.0", "archive", "windows", null, null,
"/downloads/elasticsearch/elasticsearch-6.5.0.zip");
checkService("7.0.0", "archive", "windows", null, null, "/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip");
checkService("6.5.0", "archive", "windows", null, null, "/downloads/elasticsearch/elasticsearch-6.5.0.zip");
}
public void testReleasedExternal() throws Exception {
checkService("7.0.0", "archive", "windows", null, null,
checkService(
"7.0.0",
"archive",
"windows",
null,
null,
"/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip",
"tests.internal", "false");
checkService("6.5.0", "archive", "windows", null, null,
"tests.internal",
"false"
);
checkService(
"6.5.0",
"archive",
"windows",
null,
null,
"/downloads/elasticsearch/elasticsearch-6.5.0.zip",
"tests.internal", "false");
"tests.internal",
"false"
);
}
private void checkService(String version, String type, String platform, String flavor, Boolean bundledJdk,
String urlPath, String... sysProps) throws IOException {
private void checkService(
String version,
String type,
String platform,
String flavor,
Boolean bundledJdk,
String urlPath,
String... sysProps
) throws IOException {
String suffix = urlPath.endsWith("zip") ? "zip" : "tar.gz";
String sourceFile = "src/testKit/distribution-download/distribution/files/fake_elasticsearch." + suffix;
WireMockServer wireMock = new WireMockServer(0);
@ -113,16 +164,16 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
}
}
private void assertFileDistro(String version, String type, String platform, String flavor, Boolean bundledJdk,
String... sysProps) throws IOException {
private void assertFileDistro(String version, String type, String platform, String flavor, Boolean bundledJdk, String... sysProps)
throws IOException {
List<String> finalSysProps = new ArrayList<>();
addDistroSysProps(finalSysProps, version, type, platform, flavor, bundledJdk);
finalSysProps.addAll(Arrays.asList(sysProps));
runBuild(":subproj:assertDistroFile", finalSysProps.toArray(new String[0]));
}
private void assertExtractedDistro(String version, String type, String platform, String flavor, Boolean bundledJdk,
String... sysProps) throws IOException {
private void assertExtractedDistro(String version, String type, String platform, String flavor, Boolean bundledJdk, String... sysProps)
throws IOException {
List<String> finalSysProps = new ArrayList<>();
addDistroSysProps(finalSysProps, version, type, platform, flavor, bundledJdk);
finalSysProps.addAll(Arrays.asList(sysProps));

View File

@ -45,64 +45,138 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
private static final Version BWC_STAGED_VERSION = Version.fromString("1.0.0");
private static final Version BWC_BUGFIX_VERSION = Version.fromString("1.0.1");
private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("0.90.1");
private static final BwcVersions BWC_MINOR =
new BwcVersions(new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)), BWC_MAJOR_VERSION);
private static final BwcVersions BWC_STAGED =
new BwcVersions(new TreeSet<>(Arrays.asList(BWC_STAGED_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)), BWC_MAJOR_VERSION);
private static final BwcVersions BWC_BUGFIX =
new BwcVersions(new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)), BWC_MAJOR_VERSION);
private static final BwcVersions BWC_MAINTENANCE =
new BwcVersions(new TreeSet<>(Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_STAGED_VERSION, BWC_MINOR_VERSION)), BWC_MINOR_VERSION);
private static final BwcVersions BWC_MINOR = new BwcVersions(
new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)),
BWC_MAJOR_VERSION
);
private static final BwcVersions BWC_STAGED = new BwcVersions(
new TreeSet<>(Arrays.asList(BWC_STAGED_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)),
BWC_MAJOR_VERSION
);
private static final BwcVersions BWC_BUGFIX = new BwcVersions(
new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)),
BWC_MAJOR_VERSION
);
private static final BwcVersions BWC_MAINTENANCE = new BwcVersions(
new TreeSet<>(Arrays.asList(BWC_MAINTENANCE_VERSION, BWC_STAGED_VERSION, BWC_MINOR_VERSION)),
BWC_MINOR_VERSION
);
public void testVersionDefault() {
ElasticsearchDistribution distro = checkDistro(createProject(null, false),
"testdistro", null, Type.ARCHIVE, Platform.LINUX, Flavor.OSS, true);
ElasticsearchDistribution distro = checkDistro(
createProject(null, false),
"testdistro",
null,
Type.ARCHIVE,
Platform.LINUX,
Flavor.OSS,
true
);
assertEquals(distro.getVersion(), VersionProperties.getElasticsearch());
}
public void testBadVersionFormat() {
assertDistroError(createProject(null, false), "testdistro", "badversion", Type.ARCHIVE, Platform.LINUX, Flavor.OSS, true,
"Invalid version format: 'badversion'");
assertDistroError(
createProject(null, false),
"testdistro",
"badversion",
Type.ARCHIVE,
Platform.LINUX,
Flavor.OSS,
true,
"Invalid version format: 'badversion'"
);
}
public void testTypeDefault() {
ElasticsearchDistribution distro = checkDistro(createProject(null, false),
"testdistro", "5.0.0", null, Platform.LINUX, Flavor.OSS, true);
ElasticsearchDistribution distro = checkDistro(
createProject(null, false),
"testdistro",
"5.0.0",
null,
Platform.LINUX,
Flavor.OSS,
true
);
assertEquals(distro.getType(), Type.ARCHIVE);
}
public void testPlatformDefault() {
ElasticsearchDistribution distro = checkDistro(createProject(null, false),
"testdistro", "5.0.0", Type.ARCHIVE, null, Flavor.OSS, true);
ElasticsearchDistribution distro = checkDistro(
createProject(null, false),
"testdistro",
"5.0.0",
Type.ARCHIVE,
null,
Flavor.OSS,
true
);
assertEquals(distro.getPlatform(), ElasticsearchDistribution.CURRENT_PLATFORM);
}
public void testPlatformForIntegTest() {
assertDistroError(createProject(null, false), "testdistro", "5.0.0", Type.INTEG_TEST_ZIP, Platform.LINUX, null, null,
"platform not allowed for elasticsearch distribution [testdistro]");
assertDistroError(
createProject(null, false),
"testdistro",
"5.0.0",
Type.INTEG_TEST_ZIP,
Platform.LINUX,
null,
null,
"platform not allowed for elasticsearch distribution [testdistro]"
);
}
public void testFlavorDefault() {
ElasticsearchDistribution distro = checkDistro(createProject(null, false),
"testdistro", "5.0.0", Type.ARCHIVE, Platform.LINUX, null, true);
ElasticsearchDistribution distro = checkDistro(
createProject(null, false),
"testdistro",
"5.0.0",
Type.ARCHIVE,
Platform.LINUX,
null,
true
);
assertEquals(distro.getFlavor(), Flavor.DEFAULT);
}
public void testFlavorForIntegTest() {
assertDistroError(createProject(null, false),
"testdistro", "5.0.0", Type.INTEG_TEST_ZIP, null, Flavor.OSS, null,
"flavor [oss] not allowed for elasticsearch distribution [testdistro] of type [integ_test_zip]");
assertDistroError(
createProject(null, false),
"testdistro",
"5.0.0",
Type.INTEG_TEST_ZIP,
null,
Flavor.OSS,
null,
"flavor [oss] not allowed for elasticsearch distribution [testdistro] of type [integ_test_zip]"
);
}
public void testBundledJdkDefault() {
ElasticsearchDistribution distro = checkDistro(createProject(null, false),
"testdistro", "5.0.0", Type.ARCHIVE, Platform.LINUX, null, true);
ElasticsearchDistribution distro = checkDistro(
createProject(null, false),
"testdistro",
"5.0.0",
Type.ARCHIVE,
Platform.LINUX,
null,
true
);
assertTrue(distro.getBundledJdk());
}
public void testBundledJdkForIntegTest() {
assertDistroError(createProject(null, false), "testdistro", "5.0.0", Type.INTEG_TEST_ZIP, null, null, true,
"bundledJdk not allowed for elasticsearch distribution [testdistro]");
assertDistroError(
createProject(null, false),
"testdistro",
"5.0.0",
Type.INTEG_TEST_ZIP,
null,
null,
true,
"bundledJdk not allowed for elasticsearch distribution [testdistro]"
);
}
public void testLocalCurrentVersionIntegTestZip() {
@ -110,15 +184,14 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
Project archiveProject = ProjectBuilder.builder().withParent(archivesProject).withName("integ-test-zip").build();
archiveProject.getConfigurations().create("default");
archiveProject.getArtifacts().add("default", new File("doesnotmatter"));
createDistro(project, "distro",
VersionProperties.getElasticsearch(), Type.INTEG_TEST_ZIP, null, null, null);
createDistro(project, "distro", VersionProperties.getElasticsearch(), Type.INTEG_TEST_ZIP, null, null, null);
checkPlugin(project);
}
public void testLocalCurrentVersionArchives() {
for (Platform platform : Platform.values()) {
for (Flavor flavor : Flavor.values()) {
for (boolean bundledJdk : new boolean[] { true, false}) {
for (boolean bundledJdk : new boolean[] { true, false }) {
// create a new project in each iteration, so that we know we are resolving the only additional project being created
Project project = createProject(BWC_MINOR, true);
String projectName = projectName(platform.toString(), flavor, bundledJdk);
@ -126,8 +199,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
Project archiveProject = ProjectBuilder.builder().withParent(archivesProject).withName(projectName).build();
archiveProject.getConfigurations().create("default");
archiveProject.getArtifacts().add("default", new File("doesnotmatter"));
createDistro(project, "distro",
VersionProperties.getElasticsearch(), Type.ARCHIVE, platform, flavor, bundledJdk);
createDistro(project, "distro", VersionProperties.getElasticsearch(), Type.ARCHIVE, platform, flavor, bundledJdk);
checkPlugin(project);
}
}
@ -137,14 +209,13 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
public void testLocalCurrentVersionPackages() {
for (Type packageType : new Type[] { Type.RPM, Type.DEB }) {
for (Flavor flavor : Flavor.values()) {
for (boolean bundledJdk : new boolean[] { true, false}) {
for (boolean bundledJdk : new boolean[] { true, false }) {
Project project = createProject(BWC_MINOR, true);
String projectName = projectName(packageType.toString(), flavor, bundledJdk);
Project packageProject = ProjectBuilder.builder().withParent(packagesProject).withName(projectName).build();
packageProject.getConfigurations().create("default");
packageProject.getArtifacts().add("default", new File("doesnotmatter"));
createDistro(project, "distro",
VersionProperties.getElasticsearch(), packageType, null, flavor, bundledJdk);
createDistro(project, "distro", VersionProperties.getElasticsearch(), packageType, null, flavor, bundledJdk);
checkPlugin(project);
}
}
@ -180,15 +251,32 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
}
}
private void assertDistroError(Project project, String name, String version, Type type, Platform platform,
Flavor flavor, Boolean bundledJdk, String message) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> checkDistro(project, name, version, type, platform, flavor, bundledJdk));
private void assertDistroError(
Project project,
String name,
String version,
Type type,
Platform platform,
Flavor flavor,
Boolean bundledJdk,
String message
) {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> checkDistro(project, name, version, type, platform, flavor, bundledJdk)
);
assertThat(e.getMessage(), containsString(message));
}
private ElasticsearchDistribution createDistro(Project project, String name, String version, Type type,
Platform platform, Flavor flavor, Boolean bundledJdk) {
private ElasticsearchDistribution createDistro(
Project project,
String name,
String version,
Type type,
Platform platform,
Flavor flavor,
Boolean bundledJdk
) {
NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
return distros.create(name, distro -> {
if (version != null) {
@ -210,8 +298,15 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
}
// create a distro and finalize its configuration
private ElasticsearchDistribution checkDistro(Project project, String name, String version, Type type,
Platform platform, Flavor flavor, Boolean bundledJdk) {
private ElasticsearchDistribution checkDistro(
Project project,
String name,
String version,
Type type,
Platform platform,
Flavor flavor,
Boolean bundledJdk
) {
ElasticsearchDistribution distribution = createDistro(project, name, version, type, platform, flavor, bundledJdk);
distribution.finalizeValues();
return distribution;
@ -223,8 +318,16 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
plugin.setupDistributions(project);
}
private void checkBwc(String projectName, String config, Version version,
Type type, Platform platform, Flavor flavor, BwcVersions bwcVersions, boolean isInternal) {
private void checkBwc(
String projectName,
String config,
Version version,
Type type,
Platform platform,
Flavor flavor,
BwcVersions bwcVersions,
boolean isInternal
) {
Project project = createProject(bwcVersions, isInternal);
Project archiveProject = ProjectBuilder.builder().withParent(bwcProject).withName(projectName).build();
archiveProject.getConfigurations().create(config);

View File

@ -22,35 +22,26 @@ package org.elasticsearch.gradle;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTestCase {
public static final String PROJECT_NAME = "elasticsearch-build-resources";
public void testUpToDateWithSourcesConfigured() {
getGradleRunner(PROJECT_NAME)
.withArguments("clean", "-s")
.build();
getGradleRunner(PROJECT_NAME).withArguments("clean", "-s").build();
BuildResult result = getGradleRunner(PROJECT_NAME)
.withArguments("buildResources", "-s", "-i")
.build();
BuildResult result = getGradleRunner(PROJECT_NAME).withArguments("buildResources", "-s", "-i").build();
assertTaskSuccessful(result, ":buildResources");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
result = getGradleRunner(PROJECT_NAME)
.withArguments("buildResources", "-s", "-i")
.build();
result = getGradleRunner(PROJECT_NAME).withArguments("buildResources", "-s", "-i").build();
assertTaskUpToDate(result, ":buildResources");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
}
public void testImplicitTaskDependencyCopy() {
BuildResult result = getGradleRunner(PROJECT_NAME)
.withArguments("clean", "sampleCopyAll", "-s", "-i")
.build();
BuildResult result = getGradleRunner(PROJECT_NAME).withArguments("clean", "sampleCopyAll", "-s", "-i").build();
assertTaskSuccessful(result, ":buildResources");
assertTaskSuccessful(result, ":sampleCopyAll");
@ -60,9 +51,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
}
public void testImplicitTaskDependencyInputFileOfOther() {
BuildResult result = getGradleRunner(PROJECT_NAME)
.withArguments("clean", "sample", "-s", "-i")
.build();
BuildResult result = getGradleRunner(PROJECT_NAME).withArguments("clean", "sample", "-s", "-i").build();
assertTaskSuccessful(result, ":sample");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
@ -71,10 +60,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
public void testIncorrectUsage() {
assertOutputContains(
getGradleRunner(PROJECT_NAME)
.withArguments("noConfigAfterExecution", "-s", "-i")
.buildAndFail()
.getOutput(),
getGradleRunner(PROJECT_NAME).withArguments("noConfigAfterExecution", "-s", "-i").buildAndFail().getOutput(),
"buildResources can't be configured after the task ran"
);
}

View File

@ -95,24 +95,30 @@ public abstract class JdkDownloadPluginIT extends GradleIntegrationTestCase {
protected abstract byte[] filebytes(String platform, String extension) throws IOException;
private void runBuild(
String taskname, String platform, Consumer<BuildResult> assertions, String vendor, String version) throws IOException {
private void runBuild(String taskname, String platform, Consumer<BuildResult> assertions, String vendor, String version)
throws IOException {
WireMockServer wireMock = new WireMockServer(0);
try {
String extension = platform.equals("windows") ? "zip" : "tar.gz";
boolean isOld = version.equals(oldJdkVersion());
wireMock.stubFor(head(urlEqualTo(urlPath(isOld, platform, extension))).willReturn(aResponse().withStatus(200)));
wireMock.stubFor(get(urlEqualTo(urlPath(isOld, platform, extension)))
.willReturn(aResponse().withStatus(200).withBody(filebytes(platform, extension))));
wireMock.stubFor(
get(urlEqualTo(urlPath(isOld, platform, extension))).willReturn(
aResponse().withStatus(200).withBody(filebytes(platform, extension))
)
);
wireMock.start();
GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir("jdk-download"))
.withArguments(taskname,
GradleRunner runner = GradleRunner.create()
.withProjectDir(getProjectDir("jdk-download"))
.withArguments(
taskname,
"-Dtests.jdk_vendor=" + vendor,
"-Dtests.jdk_version=" + version,
"-Dtests.jdk_repo=" + wireMock.baseUrl(),
"-i")
"-i"
)
.withPluginClasspath();
BuildResult result = runner.build();

View File

@ -32,7 +32,7 @@ public class JdkDownloadPluginTests extends GradleUnitTestCase {
@BeforeClass
public static void setupRoot() {
rootProject = ProjectBuilder.builder().build();
}
public void testMissingVendor() {
@ -46,7 +46,8 @@ public class JdkDownloadPluginTests extends GradleUnitTestCase {
"unknown",
"11.0.2+33",
"linux",
"unknown vendor [unknown] for jdk [testjdk], must be one of [adoptopenjdk, openjdk]");
"unknown vendor [unknown] for jdk [testjdk], must be one of [adoptopenjdk, openjdk]"
);
}
public void testMissingVersion() {
@ -62,13 +63,21 @@ public class JdkDownloadPluginTests extends GradleUnitTestCase {
}
public void testUnknownPlatform() {
assertJdkError(createProject(), "testjdk", "openjdk", "11.0.2+33", "unknown",
"unknown platform [unknown] for jdk [testjdk], must be one of [darwin, linux, windows, mac]");
assertJdkError(
createProject(),
"testjdk",
"openjdk",
"11.0.2+33",
"unknown",
"unknown platform [unknown] for jdk [testjdk], must be one of [darwin, linux, windows, mac]"
);
}
private void assertJdkError(Project project, String name, String vendor, String version, String platform, String message) {
IllegalArgumentException e =
expectThrows(IllegalArgumentException.class, () -> createJdk(project, name, vendor, version, platform));
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> createJdk(project, name, vendor, version, platform)
);
assertThat(e.getMessage(), equalTo(message));
}

View File

@ -49,13 +49,13 @@ public class VersionTests extends GradleUnitTestCase {
}
public void testCompareWithStringVersions() {
assertTrue("1.10.20 is not interpreted as before 2.0.0",
Version.fromString("1.10.20").before("2.0.0")
);
assertTrue("7.0.0-alpha1 should be equal to 7.0.0-alpha1",
assertTrue("1.10.20 is not interpreted as before 2.0.0", Version.fromString("1.10.20").before("2.0.0"));
assertTrue(
"7.0.0-alpha1 should be equal to 7.0.0-alpha1",
Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1"))
);
assertTrue("7.0.0-SNAPSHOT should be equal to 7.0.0-SNAPSHOT",
assertTrue(
"7.0.0-SNAPSHOT should be equal to 7.0.0-SNAPSHOT",
Version.fromString("7.0.0-SNAPSHOT").equals(Version.fromString("7.0.0-SNAPSHOT"))
);
}
@ -63,21 +63,25 @@ public class VersionTests extends GradleUnitTestCase {
public void testCollections() {
assertTrue(
Arrays.asList(
Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"),
Version.fromString("6.0.1"), Version.fromString("6.1.0")
).containsAll(Arrays.asList(
Version.fromString("6.0.1"), Version.fromString("5.2.1-SNAPSHOT")
))
Version.fromString("5.2.0"),
Version.fromString("5.2.1-SNAPSHOT"),
Version.fromString("6.0.0"),
Version.fromString("6.0.1"),
Version.fromString("6.1.0")
).containsAll(Arrays.asList(Version.fromString("6.0.1"), Version.fromString("5.2.1-SNAPSHOT")))
);
Set<Version> versions = new HashSet<>();
versions.addAll(Arrays.asList(
Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"),
Version.fromString("6.0.1"), Version.fromString("6.1.0")
));
versions.addAll(
Arrays.asList(
Version.fromString("5.2.0"),
Version.fromString("5.2.1-SNAPSHOT"),
Version.fromString("6.0.0"),
Version.fromString("6.0.1"),
Version.fromString("6.1.0")
)
);
Set<Version> subset = new HashSet<>();
subset.addAll(Arrays.asList(
Version.fromString("6.0.1"), Version.fromString("5.2.1-SNAPSHOT")
));
subset.addAll(Arrays.asList(Version.fromString("6.0.1"), Version.fromString("5.2.1-SNAPSHOT")));
assertTrue(versions.containsAll(subset));
}
@ -86,9 +90,7 @@ public class VersionTests extends GradleUnitTestCase {
}
public void testCompareVersions() {
assertEquals(0,
new Version(7, 0, 0).compareTo(new Version(7, 0, 0))
);
assertEquals(0, new Version(7, 0, 0).compareTo(new Version(7, 0, 0)));
}
public void testExceptionEmpty() {

View File

@ -23,8 +23,10 @@ public class RestTestFromSnippetsTaskTests extends GradleUnitTestCase {
}
public void testMultipleBlockQuotes() {
assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""));
assertEquals(
"\"foo\": \"bort baz\", \"bar\": \"other\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"")
);
}
public void testEscapingInBlockQuote() {

View File

@ -43,7 +43,7 @@ public class WaitForHttpResourceTests extends GradleUnitTestCase {
final Certificate certificate = store.getCertificate("ca");
assertThat(certificate, notNullValue());
assertThat(certificate, instanceOf(X509Certificate.class));
assertThat(((X509Certificate)certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA"));
assertThat(((X509Certificate) certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA"));
}
public void testBuildTrustStoreFromCA() throws Exception {
@ -55,6 +55,6 @@ public class WaitForHttpResourceTests extends GradleUnitTestCase {
final Certificate certificate = store.getCertificate("cert-0");
assertThat(certificate, notNullValue());
assertThat(certificate, instanceOf(X509Certificate.class));
assertThat(((X509Certificate)certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA"));
assertThat(((X509Certificate) certificate).getSubjectDN().toString(), equalTo("CN=Elastic Certificate Tool Autogenerated CA"));
}
}

View File

@ -18,38 +18,26 @@ public class PluginBuildPluginTests extends GradleUnitTestCase {
@Before
public void setUp() throws Exception {
project = ProjectBuilder.builder()
.withName(getClass().getName())
.build();
project = ProjectBuilder.builder().withName(getClass().getName()).build();
}
public void testApply() {
// FIXME: distribution download plugin doesn't support running externally
project.getExtensions().getExtraProperties().set(
"bwcVersions", Mockito.mock(BwcVersions.class)
);
project.getExtensions().getExtraProperties().set("bwcVersions", Mockito.mock(BwcVersions.class));
project.getPlugins().apply(PluginBuildPlugin.class);
assertNotNull(
"plugin extension created with the right name",
project.getExtensions().findByName(PluginBuildPlugin.PLUGIN_EXTENSION_NAME)
);
assertNotNull(
"plugin extensions has the right type",
project.getExtensions().findByType(PluginPropertiesExtension.class)
);
assertNotNull("plugin extensions has the right type", project.getExtensions().findByType(PluginPropertiesExtension.class));
assertNotNull(
"plugin created an integTest class",
project.getTasks().findByName("integTest")
);
assertNotNull("plugin created an integTest class", project.getTasks().findByName("integTest"));
}
@Ignore("https://github.com/elastic/elasticsearch/issues/47123")
public void testApplyWithAfterEvaluate() {
project.getExtensions().getExtraProperties().set(
"bwcVersions", Mockito.mock(BwcVersions.class)
);
project.getExtensions().getExtraProperties().set("bwcVersions", Mockito.mock(BwcVersions.class));
project.getPlugins().apply(PluginBuildPlugin.class);
PluginPropertiesExtension extension = project.getExtensions().getByType(PluginPropertiesExtension.class);
extension.setNoticeFile(project.file("test.notice"));
@ -60,9 +48,7 @@ public class PluginBuildPluginTests extends GradleUnitTestCase {
((ProjectInternal) project).evaluate();
assertNotNull(
"Task to generate notice not created: " + project.getTasks().stream()
.map(Task::getPath)
.collect(Collectors.joining(", ")),
"Task to generate notice not created: " + project.getTasks().stream().map(Task::getPath).collect(Collectors.joining(", ")),
project.getTasks().findByName("generateNotice")
);
}

View File

@ -30,8 +30,9 @@ public class PluginPropertiesExtensionTests extends GradleUnitTestCase {
String projectName = "Test";
String projectVersion = "5.0";
PluginPropertiesExtension pluginPropertiesExtension =
new PluginPropertiesExtension(this.createProject(projectName, projectVersion));
PluginPropertiesExtension pluginPropertiesExtension = new PluginPropertiesExtension(
this.createProject(projectName, projectVersion)
);
assertEquals(projectName, pluginPropertiesExtension.getName());
assertEquals(projectVersion, pluginPropertiesExtension.getVersion());
@ -40,8 +41,7 @@ public class PluginPropertiesExtensionTests extends GradleUnitTestCase {
public void testCreatingPluginPropertiesExtensionWithNameWithoutVersion() {
String projectName = "Test";
PluginPropertiesExtension pluginPropertiesExtension =
new PluginPropertiesExtension(this.createProject(projectName, null));
PluginPropertiesExtension pluginPropertiesExtension = new PluginPropertiesExtension(this.createProject(projectName, null));
assertEquals(projectName, pluginPropertiesExtension.getName());
assertEquals("unspecified", pluginPropertiesExtension.getVersion());

View File

@ -170,10 +170,7 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase {
File licensesDir = getLicensesDir(project);
createAllDefaultDependencyFiles(licensesDir, "groovy-all");
Path groovySha = Files
.list(licensesDir.toPath())
.filter(file -> file.toFile().getName().contains("sha"))
.findFirst().get();
Path groovySha = Files.list(licensesDir.toPath()).filter(file -> file.toFile().getName().contains("sha")).findFirst().get();
Files.write(groovySha, new byte[] { 1 }, StandardOpenOption.CREATE);
@ -247,16 +244,14 @@ public class DependencyLicensesTaskTests extends GradleUnitTestCase {
}
private UpdateShasTask createUpdateShasTask(Project project, TaskProvider<DependencyLicensesTask> dependencyLicensesTask) {
UpdateShasTask task = project.getTasks()
.register("updateShas", UpdateShasTask.class)
.get();
UpdateShasTask task = project.getTasks().register("updateShas", UpdateShasTask.class).get();
task.setParentTask(dependencyLicensesTask);
return task;
}
private TaskProvider<DependencyLicensesTask> createDependencyLicensesTask(Project project) {
TaskProvider<DependencyLicensesTask> task = project.getTasks()
.register("dependencyLicenses", DependencyLicensesTask.class, new Action<DependencyLicensesTask>() {
@Override
public void execute(DependencyLicensesTask dependencyLicensesTask) {

View File

@ -55,7 +55,6 @@ public class FilePermissionsTaskTests extends GradleUnitTestCase {
file.delete();
}
public void testCheckPermissionsWhenNoFileExists() throws Exception {
RandomizedTest.assumeFalse("Functionality is Unix specific", Os.isFamily(Os.FAMILY_WINDOWS));

View File

@ -26,14 +26,18 @@ import org.junit.Before;
public class TestingConventionsTasksIT extends GradleIntegrationTestCase {
@Before
public void setUp() {
}
public void setUp() {}
public void testInnerClasses() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":no_tests_in_inner_classes:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":no_tests_in_inner_classes:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.buildAndFail();
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"Test classes implemented by inner classes will not run:",
" * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention1",
" * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention2",
@ -44,10 +48,15 @@ public class TestingConventionsTasksIT extends GradleIntegrationTestCase {
}
public void testNamingConvention() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":incorrect_naming_conventions:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":incorrect_naming_conventions:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.buildAndFail();
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"Seem like test classes but don't match naming convention:",
" * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention1",
" * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention2",
@ -57,61 +66,84 @@ public class TestingConventionsTasksIT extends GradleIntegrationTestCase {
}
public void testNoEmptyTasks() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":empty_test_task:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":empty_test_task:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.buildAndFail();
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"Expected at least one test class included in task :empty_test_task:emptyTest, but found none.",
"Expected at least one test class included in task :empty_test_task:test, but found none."
);
}
public void testAllTestTasksIncluded() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":all_classes_in_tasks:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":all_classes_in_tasks:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.buildAndFail();
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"Test classes are not included in any enabled task (:all_classes_in_tasks:test):",
" * org.elasticsearch.gradle.testkit.NamingConventionIT"
);
}
public void testTaskNotImplementBaseClass() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":not_implementing_base:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":not_implementing_base:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.buildAndFail();
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"Tests classes with suffix `IT` should extend org.elasticsearch.gradle.testkit.Integration but the following classes do not:",
" * org.elasticsearch.gradle.testkit.NamingConventionIT",
" * org.elasticsearch.gradle.testkit.NamingConventionMissmatchIT",
"Tests classes with suffix `Tests` should extend org.elasticsearch.gradle.testkit.Unit but the following classes do not:",
" * org.elasticsearch.gradle.testkit.NamingConventionMissmatchTests",
" * org.elasticsearch.gradle.testkit.NamingConventionTests"
);
}
public void testValidSetupWithoutBaseClass() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":valid_setup_no_base:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":valid_setup_no_base:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.build();
assertTaskSuccessful(result, ":valid_setup_no_base:testingConventions");
}
public void testValidSetupWithBaseClass() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":valid_setup_with_base:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments(
"clean",
":valid_setup_with_base:testingConventions",
"-i",
"-s"
);
BuildResult result = runner.build();
assertTaskSuccessful(result, ":valid_setup_with_base:testingConventions");
}
public void testTestsInMain() {
GradleRunner runner = getGradleRunner("testingConventions")
.withArguments("clean", ":tests_in_main:testingConventions", "-i", "-s");
GradleRunner runner = getGradleRunner("testingConventions").withArguments("clean", ":tests_in_main:testingConventions", "-i", "-s");
BuildResult result = runner.buildAndFail();
assertOutputContains(result.getOutput(),
"Classes matching the test naming convention should be in test not main:",
" * NamingConventionIT",
" * NamingConventionTests"
assertOutputContains(
result.getOutput(),
"Classes matching the test naming convention should be in test not main:",
" * NamingConventionIT",
" * NamingConventionTests"
);
}

View File

@ -27,57 +27,64 @@ public class ThirdPartyAuditTaskIT extends GradleIntegrationTestCase {
@Before
public void setUp() throws Exception {
// Build the sample jars
getGradleRunner("thirdPartyAudit")
.withArguments("build", "-s")
.build();
getGradleRunner("thirdPartyAudit").withArguments("build", "-s").build();
}
public void testElasticsearchIgnored() {
BuildResult result = getGradleRunner("thirdPartyAudit")
.withArguments("clean", "empty", "-s",
"-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=elasticsearch.gradle:dummy-io", "-PcompileVersion=0.0.1"
)
.build();
BuildResult result = getGradleRunner("thirdPartyAudit").withArguments(
"clean",
"empty",
"-s",
"-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j",
"-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=elasticsearch.gradle:dummy-io",
"-PcompileVersion=0.0.1"
).build();
assertTaskNoSource(result, ":empty");
}
public void testWithEmptyRules() {
BuildResult result = getGradleRunner("thirdPartyAudit")
.withArguments("clean", "empty", "-s",
"-PcompileOnlyGroup=other.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io", "-PcompileVersion=0.0.1"
)
.buildAndFail();
BuildResult result = getGradleRunner("thirdPartyAudit").withArguments(
"clean",
"empty",
"-s",
"-PcompileOnlyGroup=other.gradle:broken-log4j",
"-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io",
"-PcompileVersion=0.0.1"
).buildAndFail();
}
public void testViolationFoundAndCompileOnlyIgnored() {
BuildResult result = getGradleRunner("thirdPartyAudit")
.withArguments("clean", "absurd", "-s",
"-PcompileOnlyGroup=other.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io", "-PcompileVersion=0.0.1"
)
.buildAndFail();
BuildResult result = getGradleRunner("thirdPartyAudit").withArguments(
"clean",
"absurd",
"-s",
"-PcompileOnlyGroup=other.gradle:broken-log4j",
"-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=other.gradle:dummy-io",
"-PcompileVersion=0.0.1"
).buildAndFail();
assertTaskFailed(result, ":absurd");
assertOutputContains(result.getOutput(),
"Classes with violations:",
" * TestingIO",
"> Audit of third party dependencies failed"
);
assertOutputDoesNotContain(result.getOutput(),"Missing classes:");
assertOutputContains(result.getOutput(), "Classes with violations:", " * TestingIO", "> Audit of third party dependencies failed");
assertOutputDoesNotContain(result.getOutput(), "Missing classes:");
}
public void testClassNotFoundAndCompileOnlyIgnored() {
BuildResult result = getGradleRunner("thirdPartyAudit")
.withArguments("clean", "absurd", "-s",
"-PcompileGroup=other.gradle:broken-log4j", "-PcompileVersion=0.0.1",
"-PcompileOnlyGroup=other.gradle:dummy-io", "-PcompileOnlyVersion=0.0.1"
)
.buildAndFail();
BuildResult result = getGradleRunner("thirdPartyAudit").withArguments(
"clean",
"absurd",
"-s",
"-PcompileGroup=other.gradle:broken-log4j",
"-PcompileVersion=0.0.1",
"-PcompileOnlyGroup=other.gradle:dummy-io",
"-PcompileOnlyVersion=0.0.1"
).buildAndFail();
assertTaskFailed(result, ":absurd");
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"Missing classes:",
" * org.apache.logging.log4j.LogManager",
"> Audit of third party dependencies failed"
@ -86,15 +93,19 @@ public class ThirdPartyAuditTaskIT extends GradleIntegrationTestCase {
}
public void testJarHellWithJDK() {
BuildResult result = getGradleRunner("thirdPartyAudit")
.withArguments("clean", "absurd", "-s",
"-PcompileGroup=other.gradle:jarhellJdk", "-PcompileVersion=0.0.1",
"-PcompileOnlyGroup=other.gradle:dummy-io", "-PcompileOnlyVersion=0.0.1"
)
.buildAndFail();
BuildResult result = getGradleRunner("thirdPartyAudit").withArguments(
"clean",
"absurd",
"-s",
"-PcompileGroup=other.gradle:jarhellJdk",
"-PcompileVersion=0.0.1",
"-PcompileOnlyGroup=other.gradle:dummy-io",
"-PcompileOnlyVersion=0.0.1"
).buildAndFail();
assertTaskFailed(result, ":absurd");
assertOutputContains(result.getOutput(),
assertOutputContains(
result.getOutput(),
"> Audit of third party dependencies failed:",
" Jar Hell with the JDK:",
" * java.lang.String"
@ -103,12 +114,15 @@ public class ThirdPartyAuditTaskIT extends GradleIntegrationTestCase {
}
public void testElasticsearchIgnoredWithViolations() {
BuildResult result = getGradleRunner("thirdPartyAudit")
.withArguments("clean", "absurd", "-s",
"-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j", "-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=elasticsearch.gradle:dummy-io", "-PcompileVersion=0.0.1"
)
.build();
BuildResult result = getGradleRunner("thirdPartyAudit").withArguments(
"clean",
"absurd",
"-s",
"-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j",
"-PcompileOnlyVersion=0.0.1",
"-PcompileGroup=elasticsearch.gradle:dummy-io",
"-PcompileVersion=0.0.1"
).build();
assertTaskNoSource(result, ":absurd");
}

View File

@ -45,8 +45,7 @@ public class UpdateShasTaskTests extends GradleUnitTestCase {
}
@Test
public void whenDependencyDoesntExistThenShouldDeleteDependencySha()
throws IOException, NoSuchAlgorithmException {
public void whenDependencyDoesntExistThenShouldDeleteDependencySha() throws IOException, NoSuchAlgorithmException {
File unusedSha = createFileIn(getLicensesDir(project), "test.sha1", "");
task.updateShas();
@ -55,23 +54,19 @@ public class UpdateShasTaskTests extends GradleUnitTestCase {
}
@Test
public void whenDependencyExistsButShaNotThenShouldCreateNewShaFile()
throws IOException, NoSuchAlgorithmException {
public void whenDependencyExistsButShaNotThenShouldCreateNewShaFile() throws IOException, NoSuchAlgorithmException {
project.getDependencies().add("compile", dependency);
getLicensesDir(project).mkdir();
task.updateShas();
Path groovySha = Files
.list(getLicensesDir(project).toPath())
.findFirst().get();
Path groovySha = Files.list(getLicensesDir(project).toPath()).findFirst().get();
assertTrue(groovySha.toFile().getName().startsWith("groovy-all"));
}
@Test
public void whenDependencyAndWrongShaExistsThenShouldNotOverwriteShaFile()
throws IOException, NoSuchAlgorithmException {
public void whenDependencyAndWrongShaExistsThenShouldNotOverwriteShaFile() throws IOException, NoSuchAlgorithmException {
project.getDependencies().add("compile", dependency);
File groovyJar = task.getParentTask().getDependencies().getFiles().iterator().next();
@ -84,8 +79,7 @@ public class UpdateShasTaskTests extends GradleUnitTestCase {
}
@Test
public void whenLicensesDirDoesntExistThenShouldThrowException()
throws IOException, NoSuchAlgorithmException {
public void whenLicensesDirDoesntExistThenShouldThrowException() throws IOException, NoSuchAlgorithmException {
expectedException.expect(GradleException.class);
expectedException.expectMessage(containsString("isn't a valid directory"));
@ -119,16 +113,14 @@ public class UpdateShasTaskTests extends GradleUnitTestCase {
}
private UpdateShasTask createUpdateShasTask(Project project) {
UpdateShasTask task = project.getTasks()
.register("updateShas", UpdateShasTask.class)
.get();
UpdateShasTask task = project.getTasks().register("updateShas", UpdateShasTask.class).get();
task.setParentTask(createDependencyLicensesTask(project));
return task;
}
private TaskProvider<DependencyLicensesTask> createDependencyLicensesTask(Project project) {
TaskProvider<DependencyLicensesTask> task = project.getTasks()
.register("dependencyLicenses", DependencyLicensesTask.class, new Action<DependencyLicensesTask>() {
@Override
public void execute(DependencyLicensesTask dependencyLicensesTask) {

View File

@ -74,8 +74,8 @@ public class SymbolicLinkPreservingTarIT extends GradleIntegrationTestCase {
InputStream apply(FileInputStream fis) throws IOException;
}
private void assertTar(
final String extension, final FileInputStreamWrapper wrapper, boolean preserveFileTimestamps) throws IOException {
private void assertTar(final String extension, final FileInputStreamWrapper wrapper, boolean preserveFileTimestamps)
throws IOException {
try (TarArchiveInputStream tar = new TarArchiveInputStream(wrapper.apply(new FileInputStream(getOutputFile(extension))))) {
TarArchiveEntry entry = tar.getNextTarEntry();
boolean realFolderEntry = false;
@ -93,27 +93,18 @@ public class SymbolicLinkPreservingTarIT extends GradleIntegrationTestCase {
fileEntry = true;
} else if (entry.getName().equals("real-folder/link-to-file")) {
assertTrue(entry.isSymbolicLink());
assertThat(
entry.getLinkName(),
anyOf(equalTo("./file"), equalTo(".\\file"))
);
assertThat(entry.getLinkName(), anyOf(equalTo("./file"), equalTo(".\\file")));
linkToFileEntry = true;
} else if (entry.getName().equals("link-in-folder/")) {
assertTrue(entry.isDirectory());
linkInFolderEntry = true;
} else if (entry.getName().equals("link-in-folder/link-to-file")) {
assertTrue(entry.isSymbolicLink());
assertThat(
entry.getLinkName(),
anyOf(equalTo("../real-folder/file"), equalTo("..\\real-folder\\file"))
);
assertThat(entry.getLinkName(), anyOf(equalTo("../real-folder/file"), equalTo("..\\real-folder\\file")));
linkInFolderLinkToFileEntry = true;
} else if (entry.getName().equals("link-to-real-folder")) {
assertTrue(entry.isSymbolicLink());
assertThat(
entry.getLinkName(),
anyOf(equalTo("./real-folder"), equalTo(".\\real-folder"))
);
assertThat(entry.getLinkName(), anyOf(equalTo("./real-folder"), equalTo(".\\real-folder")));
linkToRealFolderEntry = true;
} else {
throw new GradleException("unexpected entry [" + entry.getName() + "]");
@ -135,12 +126,14 @@ public class SymbolicLinkPreservingTarIT extends GradleIntegrationTestCase {
}
private void runBuild(final String task, final boolean preserveFileTimestamps) {
final GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir())
final GradleRunner runner = GradleRunner.create()
.withProjectDir(getProjectDir())
.withArguments(
task,
"-Dtests.symbolic_link_preserving_tar_source=" + temporaryFolder.getRoot().toString(),
"-Dtests.symbolic_link_preserving_tar_preserve_file_timestamps=" + preserveFileTimestamps,
"-i")
"-i"
)
.withPluginClasspath();
runner.build();

View File

@ -27,10 +27,7 @@ import org.junit.Assert;
import org.junit.runner.RunWith;
@RunWith(RandomizedRunner.class)
@TestMethodProviders({
JUnit4MethodProvider.class,
JUnit3MethodProvider.class
})
@TestMethodProviders({ JUnit4MethodProvider.class, JUnit3MethodProvider.class })
@ThreadLeakLingering(linger = 5000) // wait for "Connection worker" to die
public abstract class BaseTestCase extends Assert {
@ -39,6 +36,7 @@ public abstract class BaseTestCase extends Assert {
public interface ThrowingRunnable {
void run() throws Throwable;
}
public static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
try {
runnable.run();
@ -46,11 +44,12 @@ public abstract class BaseTestCase extends Assert {
if (expectedType.isInstance(e)) {
return expectedType.cast(e);
}
AssertionFailedError assertion =
new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + e);
AssertionFailedError assertion = new AssertionFailedError(
"Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + e
);
assertion.initCause(e);
throw assertion;
}
throw new AssertionFailedError("Expected exception "+ expectedType.getSimpleName() + " but no exception was thrown");
throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName() + " but no exception was thrown");
}
}
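For reference, call sites elsewhere in this commit use this helper in the following shape (copied from the DistributionDownloadPluginTests hunk above; the variables are that test's fixtures):

IllegalArgumentException e = expectThrows(
    IllegalArgumentException.class,
    () -> checkDistro(project, name, version, type, platform, flavor, bundledJdk)
);
assertThat(e.getMessage(), containsString(message));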

View File

@ -11,23 +11,22 @@ import static org.hamcrest.CoreMatchers.equalTo;
public class DistroTestPluginTests extends GradleIntegrationTestCase {
public void testParseOsReleaseOnOracle() {
final List<String> lines = List.of(
"NAME=\"Oracle Linux Server\"",
"VERSION=\"6.10\"",
"ID=\"ol\"",
"VERSION_ID=\"6.10\"",
"PRETTY_NAME=\"Oracle Linux Server 6.10\"",
"ANSI_COLOR=\"0;31\"",
"CPE_NAME=\"cpe:/o:oracle:linux:6:10:server\"",
"HOME_URL" + "=\"https://linux.oracle.com/\"",
"BUG_REPORT_URL=\"https://bugzilla.oracle.com/\"",
"",
"ORACLE_BUGZILLA_PRODUCT" + "=\"Oracle Linux 6\"",
"ORACLE_BUGZILLA_PRODUCT_VERSION=6.10",
"ORACLE_SUPPORT_PRODUCT=\"Oracle Linux\"",
"ORACLE_SUPPORT_PRODUCT_VERSION=6.10"
);
final Map<String, String> results = parseOsRelease(lines);
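The parseOsRelease method under test is not part of this hunk. A plausible sketch of the behaviour the input above suggests — split KEY=VALUE pairs, strip surrounding quotes, skip blank lines — is given below; this is an assumption for illustration, and the real method in DistroTestPlugin may differ (for instance in how it normalises values).

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch, not the actual DistroTestPlugin implementation.
static Map<String, String> parseOsRelease(final List<String> lines) {
    Map<String, String> values = new HashMap<>();
    for (String line : lines) {
        final String trimmed = line.trim();
        if (trimmed.isEmpty()) {
            continue; // skip the blank separator line seen in the sample input
        }
        final int eq = trimmed.indexOf('=');
        final String key = trimmed.substring(0, eq);
        final String value = trimmed.substring(eq + 1).replaceAll("^\"|\"$", "");
        values.put(key, value);
    }
    return values;
}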

View File

@ -26,8 +26,10 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
protected File getProjectDir(String name) {
File root = new File("src/testKit/");
if (root.exists() == false) {
throw new RuntimeException("Could not find resources dir for integration tests. " +
"Note that these tests can only be ran by Gradle and are not currently supported by the IDE");
throw new RuntimeException(
"Could not find resources dir for integration tests. "
+ "Note that these tests can only be ran by Gradle and are not currently supported by the IDE"
);
}
return new File(root, name).getAbsoluteFile();
}
@ -39,10 +41,7 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return GradleRunner.create()
.withProjectDir(getProjectDir(sampleProject))
.withPluginClasspath()
.withTestKitDir(testkit);
return GradleRunner.create().withProjectDir(getProjectDir(sampleProject)).withPluginClasspath().withTestKitDir(testkit);
}
protected File getBuildDir(String name) {
@ -55,9 +54,12 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
}
List<Integer> index = Stream.of(lines).map(line -> output.indexOf(line)).collect(Collectors.toList());
if (index.equals(index.stream().sorted().collect(Collectors.toList())) == false) {
fail("Expected the following lines to appear in this order:\n" +
Stream.of(lines).map(line -> " - `" + line + "`").collect(Collectors.joining("\n")) +
"\nTBut the order was different. Output is:\n\n```" + output + "\n```\n"
fail(
"Expected the following lines to appear in this order:\n"
+ Stream.of(lines).map(line -> " - `" + line + "`").collect(Collectors.joining("\n"))
+ "\nTBut the order was different. Output is:\n\n```"
+ output
+ "\n```\n"
);
}
}
@ -69,17 +71,11 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
}
protected void assertOutputContains(String output, String line) {
assertTrue(
"Expected the following line in output:\n\n" + line + "\n\nOutput is:\n" + output,
output.contains(line)
);
assertTrue("Expected the following line in output:\n\n" + line + "\n\nOutput is:\n" + output, output.contains(line));
}
protected void assertOutputDoesNotContain(String output, String line) {
assertFalse(
"Expected the following line not to be in output:\n\n" + line + "\n\nOutput is:\n" + output,
output.contains(line)
);
assertFalse("Expected the following line not to be in output:\n\n" + line + "\n\nOutput is:\n" + output, output.contains(line));
}
protected void assertOutputDoesNotContain(String output, String... lines) {
@ -113,12 +109,19 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
private void assertTaskOutcome(BuildResult result, String taskName, TaskOutcome taskOutcome) {
BuildTask task = result.task(taskName);
if (task == null) {
fail("Expected task `" + taskName + "` to be " + taskOutcome +", but it did not run" +
"\n\nOutput is:\n" + result.getOutput());
fail(
"Expected task `" + taskName + "` to be " + taskOutcome + ", but it did not run" + "\n\nOutput is:\n" + result.getOutput()
);
}
assertEquals(
"Expected task `" + taskName +"` to be " + taskOutcome + " but it was: " + task.getOutcome() +
"\n\nOutput is:\n" + result.getOutput() ,
"Expected task `"
+ taskName
+ "` to be "
+ taskOutcome
+ " but it was: "
+ task.getOutcome()
+ "\n\nOutput is:\n"
+ result.getOutput(),
taskOutcome,
task.getOutcome()
);
@ -131,8 +134,7 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
fail("Expected task `" + taskName + "` to be up-to-date, but it did not run");
}
assertEquals(
"Expected task to be up to date but it was: " + task.getOutcome() +
"\n\nOutput is:\n" + result.getOutput(),
"Expected task to be up to date but it was: " + task.getOutcome() + "\n\nOutput is:\n" + result.getOutput(),
TaskOutcome.UP_TO_DATE,
task.getOutcome()
);
@ -142,8 +144,7 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
protected void assertBuildFileExists(BuildResult result, String projectName, String path) {
Path absPath = getBuildDir(projectName).toPath().resolve(path);
assertTrue(
result.getOutput() + "\n\nExpected `" + absPath + "` to exists but it did not" +
"\n\nOutput is:\n" + result.getOutput(),
result.getOutput() + "\n\nExpected `" + absPath + "` to exists but it did not" + "\n\nOutput is:\n" + result.getOutput(),
Files.exists(absPath)
);
}
@ -151,8 +152,7 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
protected void assertBuildFileDoesNotExists(BuildResult result, String projectName, String path) {
Path absPath = getBuildDir(projectName).toPath().resolve(path);
assertFalse(
result.getOutput() + "\n\nExpected `" + absPath + "` not to exist but it did" +
"\n\nOutput is:\n" + result.getOutput(),
result.getOutput() + "\n\nExpected `" + absPath + "` not to exist but it did" + "\n\nOutput is:\n" + result.getOutput(),
Files.exists(absPath)
);
}
@ -177,12 +177,11 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
public void assertOutputOnlyOnce(String output, String... text) {
for (String each : text) {
int i = output.indexOf(each);
if (i == -1 ) {
fail("Expected \n```" + each + "```\nto appear at most once, but it didn't at all.\n\nOutout is:\n"+ output
);
if (i == -1) {
fail("Expected \n```" + each + "```\nto appear at most once, but it didn't at all.\n\nOutout is:\n" + output);
}
if(output.indexOf(each) != output.lastIndexOf(each)) {
fail("Expected `" + each + "` to appear at most once, but it did multiple times.\n\nOutout is:\n"+ output);
if (output.indexOf(each) != output.lastIndexOf(each)) {
fail("Expected `" + each + "` to appear at most once, but it did multiple times.\n\nOutout is:\n" + output);
}
}
}
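
The at-most-once check relies on a simple property: a substring occurs exactly once if and only if its first and last occurrences share the same non-negative index. A self-contained sketch (names invented):

public class OnlyOnceSketch {
    static boolean occursExactlyOnce(String output, String text) {
        int first = output.indexOf(text);
        // -1 means missing; differing first/last indices mean duplicates.
        return first != -1 && first == output.lastIndexOf(text);
    }

    public static void main(String[] args) {
        System.out.println(occursExactlyOnce("ab", "a")); // true
        System.out.println(occursExactlyOnce("aa", "a")); // false: appears twice
        System.out.println(occursExactlyOnce("b", "a"));  // false: missing
    }
}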

View File

@ -7,12 +7,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import org.junit.runner.RunWith;
@RunWith(RandomizedRunner.class)
@TestMethodProviders({
JUnit4MethodProvider.class,
JUnit3MethodProvider.class
})
@ThreadLeakFilters(defaultFilters = true, filters = {
GradleThreadsFilter.class
})
public abstract class GradleUnitTestCase extends BaseTestCase {
}
@TestMethodProviders({ JUnit4MethodProvider.class, JUnit3MethodProvider.class })
@ThreadLeakFilters(defaultFilters = true, filters = { GradleThreadsFilter.class })
public abstract class GradleUnitTestCase extends BaseTestCase {}
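
Subclasses inherit the randomized runner, both method providers, and the thread-leak filter. A minimal sketch of what a concrete test could look like (the class name and assertion are invented; JUnit's Assert is assumed to be on the classpath):

import static org.junit.Assert.assertEquals;

public class ExampleUnitTest extends GradleUnitTestCase {
    // JUnit3MethodProvider discovers this method: public, non-static,
    // zero-arg, name starts with "test" — no @Test annotation required.
    public void testDiscoveredByNamingConvention() {
        assertEquals(4, 2 + 2);
    }
}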

View File

@ -36,17 +36,18 @@ import java.util.Map;
public final class JUnit3MethodProvider implements TestMethodProvider {
@Override
public Collection<Method> getTestMethods(Class<?> suiteClass, ClassModel classModel) {
Map<Method,MethodModel> methods = classModel.getMethods();
Map<Method, MethodModel> methods = classModel.getMethods();
ArrayList<Method> result = new ArrayList<>();
for (MethodModel mm : methods.values()) {
// Skip any methods that have overrides / shadows.
if (mm.getDown() != null) continue;
if (mm.getDown() != null)
continue;
Method m = mm.element;
if (m.getName().startsWith("test") &&
Modifier.isPublic(m.getModifiers()) &&
!Modifier.isStatic(m.getModifiers()) &&
m.getParameterTypes().length == 0) {
if (m.getName().startsWith("test")
&& Modifier.isPublic(m.getModifiers())
&& !Modifier.isStatic(m.getModifiers())
&& m.getParameterTypes().length == 0) {
result.add(m);
}
}
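
The discovery rule reformatted above can be stated on its own: a candidate is any public, non-static, zero-argument method whose name starts with "test". A standalone sketch of that predicate:

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

public class JUnit3RuleSketch {
    static boolean looksLikeJUnit3Test(Method m) {
        return m.getName().startsWith("test")
            && Modifier.isPublic(m.getModifiers())
            && !Modifier.isStatic(m.getModifiers())
            && m.getParameterTypes().length == 0;
    }

    public void testExample() {} // matches the rule

    public static void main(String[] args) throws Exception {
        Method m = JUnit3RuleSketch.class.getMethod("testExample");
        System.out.println(looksLikeJUnit3Test(m)); // true
    }
}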

View File

@ -20,4 +20,4 @@ package org.elasticsearch.gradle.testkit;
public class NamingConventionIT {
}
}

View File

@ -18,13 +18,10 @@
*/
package org.elasticsearch.gradle.testkit;
import org.junit.Assert;
import org.junit.Test;
public class LooksLikeATestWithoutNamingConvention3 {
public void testMethod() {
}
}
}

View File

@ -18,9 +18,6 @@
*/
package org.elasticsearch.gradle.testkit;
import org.junit.Assert;
import org.junit.Test;
public abstract class LooksLikeTestsButAbstract {
public void testMethod() {

View File

@ -20,4 +20,4 @@ package org.elasticsearch.gradle.testkit;
public class NamingConventionIT {
}
}

View File

@ -20,4 +20,4 @@ package org.elasticsearch.gradle.testkit;
public class NamingConventionTests {
}
}

View File

@ -19,5 +19,5 @@
package org.elasticsearch.gradle.testkit;
public class Integration {
}

View File

@ -20,4 +20,4 @@ package org.elasticsearch.gradle.testkit;
public class NamingConventionIT {
}
}

View File

@ -20,4 +20,4 @@ package org.elasticsearch.gradle.testkit;
public class NamingConventionIT {
}
}

View File

@ -20,4 +20,4 @@ package org.elasticsearch.gradle.testkit;
public class NamingConventionIT {
}
}

View File

@ -5,4 +5,3 @@ public class TestingIO {
new File("foo");
}
}

View File

@ -34,11 +34,10 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
public void deletePolicies() throws Exception {
Map<String, Object> responseMap = toMap(adminClient().performRequest(new Request("GET", "/_enrich/policy")));
@SuppressWarnings("unchecked")
List<Map<?,?>> policies = (List<Map<?,?>>) responseMap.get("policies");
List<Map<?, ?>> policies = (List<Map<?, ?>>) responseMap.get("policies");
for (Map<?, ?> entry: policies) {
client().performRequest(new Request("DELETE", "/_enrich/policy/" +
XContentMapValues.extractValue("config.match.name", entry)));
for (Map<?, ?> entry : policies) {
client().performRequest(new Request("DELETE", "/_enrich/policy/" + XContentMapValues.extractValue("config.match.name", entry)));
List<?> sourceIndices = (List<?>) XContentMapValues.extractValue("config.match.indices", entry);
for (Object sourceIndex : sourceIndices) {
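
The cleanup above lists every policy via GET /_enrich/policy, then issues one DELETE per config.match.name. A hedged sketch of the same round trip using the low-level REST client (the host and policy name are invented; the real test derives the names from the GET response):

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class EnrichCleanupSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // List policies; the test converts the body to a map and reads
            // the "policies" array, as shown in the diff above.
            Response policies = client.performRequest(new Request("GET", "/_enrich/policy"));
            System.out.println(policies.getStatusLine());
            // Delete one policy by name.
            client.performRequest(new Request("DELETE", "/_enrich/policy/my_policy"));
        }
    }
}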
@ -72,9 +71,9 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
// Create pipeline
Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/my_pipeline");
putPipelineRequest.setJsonEntity("{\"processors\":[" +
"{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" +
"]}");
putPipelineRequest.setJsonEntity(
"{\"processors\":[" + "{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" + "]}"
);
assertOK(client().performRequest(putPipelineRequest));
// Index document using pipeline with enrich processor:
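
For readability, the escaped pipeline entity above is the following JSON (identical content, pretty-printed here):

{
  "processors": [
    {
      "enrich": {
        "policy_name": "my_policy",
        "field": "host",
        "target_field": "entry"
      }
    }
  ]
}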
@ -120,8 +119,10 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index"));
assertOK(client().performRequest(putPolicyRequest));
ResponseException exc = expectThrows(ResponseException.class,
() -> client().performRequest(new Request("DELETE", "/_enrich/policy/MY_POLICY")));
ResponseException exc = expectThrows(
ResponseException.class,
() -> client().performRequest(new Request("DELETE", "/_enrich/policy/MY_POLICY"))
);
assertTrue(exc.getMessage().contains("policy [MY_POLICY] not found"));
}
@ -130,15 +131,19 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
setupGenericLifecycleTest(false);
Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/another_pipeline");
putPipelineRequest.setJsonEntity("{\"processors\":[" +
"{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" +
"]}");
putPipelineRequest.setJsonEntity(
"{\"processors\":[" + "{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" + "]}"
);
assertOK(client().performRequest(putPipelineRequest));
ResponseException exc = expectThrows(ResponseException.class,
() -> client().performRequest(new Request("DELETE", "/_enrich/policy/my_policy")));
assertTrue(exc.getMessage().contains("Could not delete policy [my_policy] because" +
" a pipeline is referencing it [my_pipeline, another_pipeline]"));
ResponseException exc = expectThrows(
ResponseException.class,
() -> client().performRequest(new Request("DELETE", "/_enrich/policy/my_policy"))
);
assertTrue(
exc.getMessage()
.contains("Could not delete policy [my_policy] because" + " a pipeline is referencing it [my_pipeline, another_pipeline]")
);
// delete the pipelines so the policies can be deleted
client().performRequest(new Request("DELETE", "/_ingest/pipeline/my_pipeline"));
@ -157,7 +162,7 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
source.field("query", QueryBuilders.matchAllQuery());
}
source.field("match_field", "host");
source.field("enrich_fields", new String[] {"globalRank", "tldRank", "tld"});
source.field("enrich_fields", new String[] { "globalRank", "tldRank", "tld" });
}
source.endObject().endObject();
return Strings.toString(source);
@ -169,12 +174,12 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
}
public static String createSourceIndexMapping() {
return "\"properties\":" +
"{\"host\": {\"type\":\"keyword\"}," +
"\"globalRank\":{\"type\":\"keyword\"}," +
"\"tldRank\":{\"type\":\"keyword\"}," +
"\"tld\":{\"type\":\"keyword\"}" +
"}";
return "\"properties\":"
+ "{\"host\": {\"type\":\"keyword\"},"
+ "\"globalRank\":{\"type\":\"keyword\"},"
+ "\"tldRank\":{\"type\":\"keyword\"},"
+ "\"tld\":{\"type\":\"keyword\"}"
+ "}";
}
private static Map<String, Object> toMap(Response response) throws IOException {
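
Concatenated, the mapping string built just above yields this JSON fragment (same content, pretty-printed here):

"properties": {
  "host":       { "type": "keyword" },
  "globalRank": { "type": "keyword" },
  "tldRank":    { "type": "keyword" },
  "tld":        { "type": "keyword" }
}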
@ -204,11 +209,15 @@ public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
for (int i = 0; i < hits.size(); i++) {
Map<?, ?> hit = (Map<?, ?>) hits.get(i);
int foundRemoteRequestsTotal =
(int) XContentMapValues.extractValue("_source.enrich_coordinator_stats.remote_requests_total", hit);
int foundRemoteRequestsTotal = (int) XContentMapValues.extractValue(
"_source.enrich_coordinator_stats.remote_requests_total",
hit
);
maxRemoteRequestsTotal = Math.max(maxRemoteRequestsTotal, foundRemoteRequestsTotal);
int foundExecutedSearchesTotal =
(int) XContentMapValues.extractValue("_source.enrich_coordinator_stats.executed_searches_total", hit);
int foundExecutedSearchesTotal = (int) XContentMapValues.extractValue(
"_source.enrich_coordinator_stats.executed_searches_total",
hit
);
maxExecutedSearchesTotal = Math.max(maxExecutedSearchesTotal, foundExecutedSearchesTotal);
}
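
XContentMapValues.extractValue walks a dot-separated path through nested maps, which is how the stats are pulled out of each hit above. A small sketch (the map literal stands in for a parsed search hit):

import java.util.Map;

import org.elasticsearch.common.xcontent.support.XContentMapValues;

public class ExtractValueSketch {
    public static void main(String[] args) {
        Map<String, Object> hit = Map.of(
            "_source", Map.of(
                "enrich_coordinator_stats", Map.of("remote_requests_total", 3)));
        // Walks _source -> enrich_coordinator_stats -> remote_requests_total
        Object value = XContentMapValues.extractValue("_source.enrich_coordinator_stats.remote_requests_total", hit);
        System.out.println(value); // 3
    }
}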

View File

@ -20,17 +20,13 @@ public class EnrichSecurityFailureIT extends ESRestTestCase {
@Override
protected Settings restClientSettings() {
String token = basicAuthHeaderValue("test_enrich_no_privs", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
.build();
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Override
protected Settings restAdminSettings() {
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
.build();
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
public void testFailure() throws Exception {
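
Both settings overrides attach a precomputed Basic auth header to every request. A sketch of what basicAuthHeaderValue presumably produces — "Basic " followed by base64(user:password) — using the credentials from the diff (the helper's implementation is not shown here, so treat the encoding as an assumption):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthSketch {
    public static void main(String[] args) {
        // Assumed equivalent of basicAuthHeaderValue("test_admin", ...):
        String token = "Basic " + Base64.getEncoder()
            .encodeToString("test_admin:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
        System.out.println(token);
    }
}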

View File

@ -20,17 +20,13 @@ public class EnrichSecurityIT extends CommonEnrichRestTestCase {
@Override
protected Settings restClientSettings() {
String token = basicAuthHeaderValue("test_enrich", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
.build();
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
@Override
protected Settings restAdminSettings() {
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
.build();
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
public void testInsufficientPermissionsOnNonExistentIndex() throws Exception {
@ -42,7 +38,9 @@ public class EnrichSecurityIT extends CommonEnrichRestTestCase {
Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
putPolicyRequest.setJsonEntity(generatePolicySource("some-other-index"));
ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest));
assertThat(exc.getMessage(),
containsString("unable to store policy because no indices match with the specified index patterns [some-other-index]"));
assertThat(
exc.getMessage(),
containsString("unable to store policy because no indices match with the specified index patterns [some-other-index]")
);
}
}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.enrich;
import org.elasticsearch.test.enrich.CommonEnrichRestTestCase;
public class EnrichIT extends CommonEnrichRestTestCase {
}
public class EnrichIT extends CommonEnrichRestTestCase {}