This commit teaches the build how to bundle AdoptOpenJDK with our artifacts and switches the bundled JDK to AdoptOpenJDK. We retain the ability to bundle Oracle OpenJDK distributions as well.
parent 00c604a5a6
commit cd71d4a83b
@@ -105,6 +105,7 @@ dependencies {
  compile localGroovy()

  compile 'commons-codec:commons-codec:1.12'
  compile 'org.apache.commons:commons-compress:1.19'

  compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
  compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
@@ -1 +1,2 @@
include 'reaper'
include 'symbolic-link-preserving-tar'
@@ -66,6 +66,7 @@ import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath
public class DistroTestPlugin implements Plugin<Project> {

    private static final String GRADLE_JDK_VERSION = "12.0.1+12@69cfe15208a647278a19ef0990eea691";
    private static final String GRADLE_JDK_VENDOR = "openjdk";

    // all distributions used by distro tests. this is temporary until tests are per distribution
    private static final String DISTRIBUTIONS_CONFIGURATION = "distributions";
@@ -138,8 +139,10 @@ public class DistroTestPlugin implements Plugin<Project> {
        });
    }

    private static Jdk createJdk(NamedDomainObjectContainer<Jdk> jdksContainer, String name, String version, String platform) {
    private static Jdk createJdk(
        NamedDomainObjectContainer<Jdk> jdksContainer, String name, String vendor, String version, String platform) {
        Jdk jdk = jdksContainer.create(name);
        jdk.setVendor(vendor);
        jdk.setVersion(version);
        jdk.setPlatform(platform);
        return jdk;
@@ -171,10 +174,10 @@ public class DistroTestPlugin implements Plugin<Project> {
        String box = project.getName();

        // setup jdks used by the distro tests, and by gradle executing

        NamedDomainObjectContainer<Jdk> jdksContainer = JdkDownloadPlugin.getContainer(project);
        String platform = box.contains("windows") ? "windows" : "linux";
        Jdk gradleJdk = createJdk(jdksContainer, "gradle", GRADLE_JDK_VERSION, platform);
        Jdk gradleJdk = createJdk(jdksContainer, "gradle", GRADLE_JDK_VENDOR, GRADLE_JDK_VERSION, platform);

        // setup VM used by these tests
        VagrantExtension vagrant = project.getExtensions().getByType(VagrantExtension.class);
@@ -311,7 +314,7 @@ public class DistroTestPlugin implements Plugin<Project> {
            }
        });
    }

    private List<ElasticsearchDistribution> configureDistributions(Project project, Version upgradeVersion) {
        NamedDomainObjectContainer<ElasticsearchDistribution> distributions = DistributionDownloadPlugin.getContainer(project);
        List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
@@ -34,19 +34,22 @@ import java.util.regex.Pattern;

public class Jdk implements Buildable, Iterable<File> {

    static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)(@([a-f0-9]{32}))?");
    private static final List<String> ALLOWED_PLATFORMS = Collections.unmodifiableList(Arrays.asList("linux", "windows", "darwin"));
    private static final List<String> ALLOWED_VENDORS = Collections.unmodifiableList(Arrays.asList("adoptopenjdk", "openjdk"));
    static final Pattern VERSION_PATTERN =
        Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?");
    private static final List<String> ALLOWED_PLATFORMS = Collections.unmodifiableList(Arrays.asList("darwin", "linux", "windows"));

    private final String name;
    private final Configuration configuration;

    private final Property<String> vendor;
    private final Property<String> version;
    private final Property<String> platform;

    Jdk(String name, Project project) {
        this.name = name;
        this.configuration = project.getConfigurations().create("jdk_" + name);
        this.vendor = project.getObjects().property(String.class);
        this.version = project.getObjects().property(String.class);
        this.platform = project.getObjects().property(String.class);
    }
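As an aside (not part of the commit; the demo class below is hypothetical), the relaxed VERSION_PATTERN accepts both vendor styles: the Oracle OpenJDK form that carries a 32-character download hash after the @, and the AdoptOpenJDK form that is just version plus build. Both sample versions used here appear elsewhere in this diff.

import java.util.regex.Pattern;

public class VersionPatternDemo {
    // same regular expression as the updated Jdk.VERSION_PATTERN above
    static final Pattern VERSION_PATTERN =
        Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?");

    public static void main(String[] args) {
        // Oracle OpenJDK style: version + build + 32-character hash taken from the download URL
        System.out.println(VERSION_PATTERN.matcher("12.0.1+12@69cfe15208a647278a19ef0990eea691").matches()); // true
        // AdoptOpenJDK style: version + build, no hash
        System.out.println(VERSION_PATTERN.matcher("12.0.2+10").matches()); // true
    }
}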
@@ -55,6 +58,17 @@ public class Jdk implements Buildable, Iterable<File> {
        return name;
    }

    public String getVendor() {
        return vendor.get();
    }

    public void setVendor(final String vendor) {
        if (ALLOWED_VENDORS.contains(vendor) == false) {
            throw new IllegalArgumentException("unknown vendor [" + vendor + "] for jdk [" + name + "], must be one of " + ALLOWED_VENDORS);
        }
        this.vendor.set(vendor);
    }

    public String getVersion() {
        return version.get();
    }
@@ -105,12 +119,17 @@ public class Jdk implements Buildable, Iterable<File> {
        if (platform.isPresent() == false) {
            throw new IllegalArgumentException("platform not specified for jdk [" + name + "]");
        }
        if (vendor.isPresent() == false) {
            throw new IllegalArgumentException("vendor not specified for jdk [" + name + "]");
        }
        version.finalizeValue();
        platform.finalizeValue();
        vendor.finalizeValue();
    }

    @Override
    public Iterator<File> iterator() {
        return configuration.iterator();
    }

}
@@ -19,6 +19,7 @@

package org.elasticsearch.gradle;

import org.elasticsearch.gradle.tar.SymbolicLinkPreservingUntarTask;
import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
@@ -31,12 +32,15 @@ import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.artifacts.dsl.RepositoryHandler;
import org.gradle.api.artifacts.repositories.IvyArtifactRepository;
import org.gradle.api.file.CopySpec;
import org.gradle.api.file.Directory;
import org.gradle.api.file.FileTree;
import org.gradle.api.file.RelativePath;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.Copy;
import org.gradle.api.tasks.TaskProvider;

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
@@ -60,6 +64,7 @@ public class JdkDownloadPlugin implements Plugin<Project> {
        project.afterEvaluate(p -> {
            for (Jdk jdk : jdksContainer) {
                jdk.finalizeValues();
                String vendor = jdk.getVendor();
                String version = jdk.getVersion();
                String platform = jdk.getPlatform();
@@ -67,18 +72,21 @@ public class JdkDownloadPlugin implements Plugin<Project> {
                DependencyHandler dependencies = project.getDependencies();
                Map<String, Object> depConfig = new HashMap<>();
                depConfig.put("path", ":"); // root project
                depConfig.put("configuration", configName("extracted_jdk", version, platform));
                depConfig.put("configuration", configName("extracted_jdk", vendor, version, platform));
                dependencies.add(jdk.getConfiguration().getName(), dependencies.project(depConfig));

                // ensure a root level jdk download task exists
                setupRootJdkDownload(project.getRootProject(), platform, version);
                setupRootJdkDownload(project.getRootProject(), platform, vendor, version);
            }
        });

        // all other repos should ignore the special jdk artifacts
        project.getRootProject().getRepositories().all(repo -> {
            if (repo.getName().startsWith(REPO_NAME_PREFIX) == false) {
                repo.content(content -> content.excludeGroup("jdk"));
                repo.content(content -> {
                    content.excludeGroup("adoptopenjdk");
                    content.excludeGroup("openjdk");
                });
            }
        });
    }
@@ -88,8 +96,8 @@ public class JdkDownloadPlugin implements Plugin<Project> {
        return (NamedDomainObjectContainer<Jdk>) project.getExtensions().getByName(CONTAINER_NAME);
    }

    private static void setupRootJdkDownload(Project rootProject, String platform, String version) {
        String extractTaskName = "extract" + capitalize(platform) + "Jdk" + version;
    private static void setupRootJdkDownload(Project rootProject, String platform, String vendor, String version) {
        String extractTaskName = "extract" + capitalize(platform) + "Jdk-" + vendor + "-" + version;
        // NOTE: this is *horrendous*, but seems to be the only way to check for the existence of a registered task
        try {
            rootProject.getTasks().named(extractTaskName);
@@ -111,83 +119,162 @@ public class JdkDownloadPlugin implements Plugin<Project> {
        String hash = jdkVersionMatcher.group(5);

        // add fake ivy repo for jdk url
        String repoName = REPO_NAME_PREFIX + version;
        String repoName = REPO_NAME_PREFIX + vendor + "_" + version;
        RepositoryHandler repositories = rootProject.getRepositories();
        if (rootProject.getRepositories().findByName(repoName) == null) {
            if (hash != null) {
                // current pattern since 12.0.1
            if (vendor.equals("adoptopenjdk")) {
                if (hash != null) {
                    throw new IllegalArgumentException("adoptopenjdk versions do not have hashes but was [" + version + "]");
                }
                repositories.ivy(ivyRepo -> {
                    ivyRepo.setName(repoName);
                    ivyRepo.setUrl("https://download.oracle.com");
                    ivyRepo.setUrl("https://artifactory.elstc.co/artifactory/oss-jdk-local/");
                    ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
                    ivyRepo.patternLayout(layout -> layout.artifact(
                        "java/GA/jdk" + jdkVersion + "/" + hash + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
                    ivyRepo.content(content -> content.includeGroup("jdk"));
                    final String pattern = String.format(
                        Locale.ROOT,
                        "adoptopenjdk/OpenJDK%sU-jdk_x64_[module]_hotspot_[revision]_%s.[ext]",
                        jdkMajor,
                        jdkBuild);
                    ivyRepo.patternLayout(layout -> layout.artifact(pattern));
                    ivyRepo.content(content -> content.includeGroup("adoptopenjdk"));
                });
            } else {
                // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back
                repositories.ivy(ivyRepo -> {
                    ivyRepo.setName(repoName);
                    ivyRepo.setUrl("https://download.oracle.com");
                    ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
                    ivyRepo.patternLayout(layout ->
                        layout.artifact("java/GA/jdk" + jdkMajor + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
                    ivyRepo.content(content -> content.includeGroup("jdk"));
                });
                assert vendor.equals("openjdk") : vendor;
                if (hash != null) {
                    // current pattern since 12.0.1
                    repositories.ivy(ivyRepo -> {
                        ivyRepo.setName(repoName);
                        ivyRepo.setUrl("https://download.oracle.com");
                        ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
                        ivyRepo.patternLayout(layout -> layout.artifact(
                            "java/GA/jdk" + jdkVersion + "/" + hash + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
                        ivyRepo.content(content -> content.includeGroup("openjdk"));
                    });
                } else {
                    // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back
                    repositories.ivy(ivyRepo -> {
                        ivyRepo.setName(repoName);
                        ivyRepo.setUrl("https://download.oracle.com");
                        ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
                        ivyRepo.patternLayout(layout ->
                            layout.artifact("java/GA/jdk" + jdkMajor + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]"));
                        ivyRepo.content(content -> content.includeGroup("openjdk"));
                    });
                }
            }
        }

        // add the jdk as a "dependency"
        final ConfigurationContainer configurations = rootProject.getConfigurations();
        String remoteConfigName = configName("openjdk", version, platform);
        String localConfigName = configName("extracted_jdk", version, platform);
        String remoteConfigName = configName(vendor, version, platform);
        String localConfigName = configName("extracted_jdk", vendor, version, platform);
        Configuration jdkConfig = configurations.findByName(remoteConfigName);
        if (jdkConfig == null) {
            jdkConfig = configurations.create(remoteConfigName);
            configurations.create(localConfigName);
        }
        String platformDep = platform.equals("darwin") ? (vendor.equals("adoptopenjdk") ? "mac" : "osx") : platform;
        String extension = platform.equals("windows") ? "zip" : "tar.gz";
        String jdkDep = "jdk:" + (platform.equals("darwin") ? "osx" : platform) + ":" + jdkVersion + "@" + extension;
        rootProject.getDependencies().add(configName("openjdk", version, platform), jdkDep);
        String jdkDep = vendor + ":" + platformDep + ":" + jdkVersion + "@" + extension;
        rootProject.getDependencies().add(configName(vendor, version, platform), jdkDep);

        // add task for extraction
        // TODO: look into doing this as an artifact transform, which are cacheable starting in gradle 5.3
        int rootNdx = platform.equals("darwin") ? 2 : 1;
        Action<CopySpec> removeRootDir = copy -> {
            // remove extra unnecessary directory levels
            copy.eachFile(details -> {
                String[] pathSegments = details.getRelativePath().getSegments();
                String[] newPathSegments = Arrays.copyOfRange(pathSegments, rootNdx, pathSegments.length);
                details.setRelativePath(new RelativePath(true, newPathSegments));
            });
            copy.setIncludeEmptyDirs(false);
        };
        final Provider<Directory> extractPath =
            rootProject.getLayout().getBuildDirectory().dir("jdks/" + vendor + "-" + jdkVersion + "_" + platform);

        // delay resolving jdkConfig until runtime
        Supplier<File> jdkArchiveGetter = jdkConfig::getSingleFile;
        final Callable<FileTree> fileGetter;
        final Object extractTask;
        if (extension.equals("zip")) {
            fileGetter = () -> rootProject.zipTree(jdkArchiveGetter.get());
        } else {
            fileGetter = () -> rootProject.tarTree(rootProject.getResources().gzip(jdkArchiveGetter.get()));
        }
        String extractDir = rootProject.getBuildDir().toPath().resolve("jdks/openjdk-" + jdkVersion + "_" + platform).toString();
        TaskProvider<Copy> extractTask = rootProject.getTasks().register(extractTaskName, Copy.class, copyTask -> {
            copyTask.doFirst(new Action<Task>() {
                @Override
                public void execute(Task t) {
                    rootProject.delete(extractDir);
                }
            final Callable<FileTree> fileGetter = () -> rootProject.zipTree(jdkArchiveGetter.get());
            // TODO: look into doing this as an artifact transform, which are cacheable starting in gradle 5.3
            Action<CopySpec> removeRootDir = copy -> {
                // remove extra unnecessary directory levels
                copy.eachFile(details -> {
                    /*
                     * We want to remove up to and including the jdk-.* relative paths. That is, a JDK archive is structured as:
                     *  jdk-12.0.1/
                     *  jdk-12.0.1/Contents
                     *  ...
                     *
                     * and we want to remove the leading jdk-12.0.1. Note however that there could also be a leading ./ as in
                     *  ./
                     *  ./jdk-12.0.1/
                     *  ./jdk-12.0.1/Contents
                     *
                     * so we account for this and search the path components until we find the jdk-12.0.1, and strip the leading components.
                     */
                    String[] pathSegments = details.getRelativePath().getSegments();
                    int index = 0;
                    for (; index < pathSegments.length; index++) {
                        if (pathSegments[index].matches("jdk-.*")) break;
                    }
                    assert index + 1 <= pathSegments.length;
                    String[] newPathSegments = Arrays.copyOfRange(pathSegments, index + 1, pathSegments.length);
                    details.setRelativePath(new RelativePath(true, newPathSegments));
                });
                copy.setIncludeEmptyDirs(false);
            };
            extractTask = rootProject.getTasks().register(extractTaskName, Copy.class, copyTask -> {
                copyTask.doFirst(new Action<Task>() {
                    @Override
                    public void execute(Task t) {
                        rootProject.delete(extractPath);
                    }
                });
                copyTask.into(extractPath);
                copyTask.from(fileGetter, removeRootDir);
            });
            copyTask.into(extractDir);
            copyTask.from(fileGetter, removeRootDir);
        });
        } else {
            /*
             * Gradle TarFileTree does not resolve symlinks, so we have to manually extract and preserve the symlinks.
             * cf. https://github.com/gradle/gradle/issues/3982 and https://discuss.gradle.org/t/tar-and-untar-losing-symbolic-links/2039
             */
            final Configuration jdkConfiguration = jdkConfig;
            extractTask = rootProject.getTasks().register(extractTaskName, SymbolicLinkPreservingUntarTask.class, task -> {
                task.getTarFile().set(jdkConfiguration.getSingleFile());
                task.getExtractPath().set(extractPath);
                task.setTransform(
                    name -> {
                        /*
                         * We want to remove up to and including the jdk-.* relative paths. That is, a JDK archive is structured as:
                         *  jdk-12.0.1/
                         *  jdk-12.0.1/Contents
                         *  ...
                         *
                         * and we want to remove the leading jdk-12.0.1. Note however that there could also be a leading ./ as in
                         *  ./
                         *  ./jdk-12.0.1/
                         *  ./jdk-12.0.1/Contents
                         *
                         * so we account for this and search the path components until we find the jdk-12.0.1, and strip the leading
                         * components.
                         */
                        final Path entryName = Paths.get(name);
                        int index = 0;
                        for (; index < entryName.getNameCount(); index++) {
                            if (entryName.getName(index).toString().matches("jdk-.*")) break;
                        }
                        if (index + 1 >= entryName.getNameCount()) {
                            // this happens on the top-level directories in the archive, which we are removing
                            return null;
                        }
                        // finally remove the top-level directories from the output path
                        return entryName.subpath(index + 1, entryName.getNameCount());
                    });
            });
        }
        rootProject.getArtifacts().add(localConfigName,
            rootProject.getLayout().getProjectDirectory().dir(extractDir),
            extractPath,
            artifact -> artifact.builtBy(extractTask));
    }

    private static String configName(String prefix, String version, String platform) {
        return prefix + "_" + version + "_" + platform;
    private static String configName(String vendor, String version, String platform) {
        return vendor + "_" + version + "_" + platform;
    }

    private static String configName(String prefix, String vendor, String version, String platform) {
        return prefix + "_" + vendor + "_" + version + "_" + platform;
    }

    private static String capitalize(String s) {
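To make the AdoptOpenJDK repository layout concrete, here is a small sketch (not part of the commit; the demo class is hypothetical) that substitutes Ivy's [module], [revision], and [ext] tokens by hand for the version 12.0.2+10; the resulting path matches the one stubbed by AdoptOpenJdkDownloadPluginIT later in this diff.

import java.util.Locale;

public class AdoptOpenJdkPatternDemo {
    public static void main(String[] args) {
        // "12.0.2+10" parsed by VERSION_PATTERN: jdkMajor = 12, jdkVersion = 12.0.2, jdkBuild = 10
        String jdkMajor = "12";
        String jdkVersion = "12.0.2";
        String jdkBuild = "10";
        // the Ivy artifact pattern registered above for the adoptopenjdk vendor
        String pattern = String.format(
            Locale.ROOT,
            "adoptopenjdk/OpenJDK%sU-jdk_x64_[module]_hotspot_[revision]_%s.[ext]",
            jdkMajor,
            jdkBuild);
        // Ivy fills the tokens from the dependency "adoptopenjdk:linux:12.0.2@tar.gz"; done by hand here
        String resolved = pattern
            .replace("[module]", "linux")
            .replace("[revision]", jdkVersion)
            .replace("[ext]", "tar.gz");
        System.out.println(resolved); // adoptopenjdk/OpenJDK12U-jdk_x64_linux_hotspot_12.0.2_10.tar.gz
    }
}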
@@ -0,0 +1,211 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle.tar;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarConstants;
import org.apache.commons.compress.archivers.zip.UnixStat;
import org.gradle.api.GradleException;
import org.gradle.api.file.FileCopyDetails;
import org.gradle.api.file.RegularFile;
import org.gradle.api.internal.file.CopyActionProcessingStreamAction;
import org.gradle.api.internal.file.archive.compression.ArchiveOutputStreamFactory;
import org.gradle.api.internal.file.archive.compression.Bzip2Archiver;
import org.gradle.api.internal.file.archive.compression.GzipArchiver;
import org.gradle.api.internal.file.archive.compression.SimpleCompressor;
import org.gradle.api.internal.file.copy.CopyAction;
import org.gradle.api.internal.file.copy.CopyActionProcessingStream;
import org.gradle.api.internal.file.copy.FileCopyDetailsInternal;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.WorkResult;
import org.gradle.api.tasks.WorkResults;
import org.gradle.api.tasks.bundling.Tar;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.HashSet;
import java.util.Set;

/**
 * A custom archive task that assembles a tar archive that preserves symbolic links.
 *
 * This task is necessary because the built-in task {@link org.gradle.api.tasks.bundling.Tar} does not preserve symbolic links.
 */
public class SymbolicLinkPreservingTar extends Tar {

    @Override
    protected CopyAction createCopyAction() {
        final ArchiveOutputStreamFactory compressor;
        switch (getCompression()) {
            case BZIP2:
                compressor = Bzip2Archiver.getCompressor();
                break;
            case GZIP:
                compressor = GzipArchiver.getCompressor();
                break;
            default:
                compressor = new SimpleCompressor();
                break;
        }
        return new SymbolicLinkPreservingTarCopyAction(getArchiveFile(), compressor, isPreserveFileTimestamps());
    }

    private static class SymbolicLinkPreservingTarCopyAction implements CopyAction {

        private final Provider<RegularFile> tarFile;
        private final ArchiveOutputStreamFactory compressor;
        private final boolean isPreserveFileTimestamps;

        SymbolicLinkPreservingTarCopyAction(
            final Provider<RegularFile> tarFile,
            final ArchiveOutputStreamFactory compressor,
            final boolean isPreserveFileTimestamps) {
            this.tarFile = tarFile;
            this.compressor = compressor;
            this.isPreserveFileTimestamps = isPreserveFileTimestamps;
        }

        @Override
        public WorkResult execute(final CopyActionProcessingStream stream) {
            try (OutputStream out = compressor.createArchiveOutputStream(tarFile.get().getAsFile());
                 TarArchiveOutputStream tar = new TarArchiveOutputStream(out)) {
                tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
                stream.process(new SymbolicLinkPreservingTarStreamAction(tar));
            } catch (final IOException e) {
                throw new GradleException("failed writing tar file [" + tarFile + "]", e);
            }

            return WorkResults.didWork(true);
        }

        private class SymbolicLinkPreservingTarStreamAction implements CopyActionProcessingStreamAction {

            private final TarArchiveOutputStream tar;

            /*
             * When Gradle walks the file tree, it will follow symbolic links. This means that if there is a symbolic link to a directory
             * in the source file tree, we could otherwise end up duplicating the entries below that directory in the resulting tar archive.
             * To avoid this, we track which symbolic links we have visited, and skip files that are children of symbolic links that we have
             * already visited.
             */
            private final Set<File> visitedSymbolicLinks = new HashSet<>();

            SymbolicLinkPreservingTarStreamAction(final TarArchiveOutputStream tar) {
                this.tar = tar;
            }

            @Override
            public void processFile(final FileCopyDetailsInternal details) {
                if (isChildOfVisitedSymbolicLink(details) == false) {
                    if (isSymbolicLink(details)) {
                        visitSymbolicLink(details);
                    } else if (details.isDirectory()) {
                        visitDirectory(details);
                    } else {
                        visitFile(details);
                    }
                }
            }

            private boolean isChildOfVisitedSymbolicLink(final FileCopyDetailsInternal details) {
                final File file;
                try {
                    file = details.getFile();
                } catch (final UnsupportedOperationException e) {
                    // we get invoked with stubbed details, there is no way to introspect this other than catching this exception
                    return false;
                }
                for (final File symbolicLink : visitedSymbolicLinks) {
                    if (isChildOf(symbolicLink, file)) return true;
                }
                return false;
            }

            private boolean isChildOf(final File directory, final File file) {
                return file.toPath().startsWith(directory.toPath());
            }

            private boolean isSymbolicLink(final FileCopyDetailsInternal details) {
                final File file;
                try {
                    file = details.getFile();
                } catch (final UnsupportedOperationException e) {
                    // we get invoked with stubbed details, there is no way to introspect this other than catching this exception
                    return false;
                }
                return Files.isSymbolicLink(file.toPath());
            }

            private void visitSymbolicLink(final FileCopyDetailsInternal details) {
                visitedSymbolicLinks.add(details.getFile());
                final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString(), TarConstants.LF_SYMLINK);
                entry.setModTime(getModTime(details));
                entry.setMode(UnixStat.LINK_FLAG | details.getMode());
                try {
                    entry.setLinkName(Files.readSymbolicLink(details.getFile().toPath()).toString());
                    tar.putArchiveEntry(entry);
                    tar.closeArchiveEntry();
                } catch (final IOException e) {
                    handleProcessingException(details, e);
                }
            }

            private void visitDirectory(final FileCopyDetailsInternal details) {
                final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString() + "/");
                entry.setModTime(getModTime(details));
                entry.setMode(UnixStat.DIR_FLAG | details.getMode());
                try {
                    tar.putArchiveEntry(entry);
                    tar.closeArchiveEntry();
                } catch (final IOException e) {
                    handleProcessingException(details, e);
                }
            }

            private void visitFile(final FileCopyDetailsInternal details) {
                final TarArchiveEntry entry = new TarArchiveEntry(details.getRelativePath().getPathString());
                entry.setModTime(getModTime(details));
                entry.setMode(UnixStat.FILE_FLAG | details.getMode());
                entry.setSize(details.getSize());
                try {
                    tar.putArchiveEntry(entry);
                    details.copyTo(tar);
                    tar.closeArchiveEntry();
                } catch (final IOException e) {
                    handleProcessingException(details, e);
                }
            }

            private void handleProcessingException(final FileCopyDetailsInternal details, final IOException e) {
                throw new GradleException("could not add [" + details + "] to tar file [" + tarFile + "]", e);
            }

        }

        private long getModTime(final FileCopyDetails details) {
            return isPreserveFileTimestamps ? details.getLastModified() : 0;
        }

    }

}
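For readers unfamiliar with how commons-compress models symbolic links, the stand-alone sketch below (not part of the commit; the demo class is hypothetical) writes a single symlink entry the same way visitSymbolicLink does, as a header carrying TarConstants.LF_SYMLINK and a link name with no file body, then reads it back; the entry names mirror those used by SymbolicLinkPreservingTarIT later in this diff.

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarConstants;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

public class SymlinkTarEntryDemo {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (TarArchiveOutputStream tar = new TarArchiveOutputStream(bytes)) {
            // a symlink entry is only a header: the link flag plus the link target, no content to copy
            TarArchiveEntry link = new TarArchiveEntry("real-folder/link-to-file", TarConstants.LF_SYMLINK);
            link.setLinkName("./file");
            tar.putArchiveEntry(link);
            tar.closeArchiveEntry();
        }
        try (TarArchiveInputStream tar = new TarArchiveInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            TarArchiveEntry entry = tar.getNextTarEntry();
            System.out.println(entry.isSymbolicLink() + " -> " + entry.getLinkName()); // true -> ./file
        }
    }
}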
@@ -0,0 +1,162 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle.tar;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskAction;

import javax.inject.Inject;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;
import java.util.function.Function;

/**
 * A custom task that explodes a tar archive that preserves symbolic links.
 *
 * This task is necessary because the built-in task {@link org.gradle.api.internal.file.archive.TarFileTree} does not preserve symbolic
 * links.
 */
public class SymbolicLinkPreservingUntarTask extends DefaultTask {

    private final RegularFileProperty tarFile;

    @InputFile
    public RegularFileProperty getTarFile() {
        return tarFile;
    }

    private final DirectoryProperty extractPath;

    @OutputDirectory
    public DirectoryProperty getExtractPath() {
        return extractPath;
    }

    private Function<String, Path> transform;

    /**
     * A transform to apply to the tar entry, to derive the relative path from the entry name. If the return value is null, the entry is
     * dropped from the exploded tar archive.
     *
     * @param transform the transform
     */
    @Input
    public void setTransform(Function<String, Path> transform) {
        this.transform = transform;
    }

    @Inject
    public SymbolicLinkPreservingUntarTask(final ObjectFactory objectFactory) {
        this.tarFile = objectFactory.fileProperty();
        this.extractPath = objectFactory.directoryProperty();
        this.transform = name -> Paths.get(name);
    }

    @TaskAction
    final void execute() {
        // ensure the target extraction path is empty
        getProject().delete(extractPath);
        try (TarArchiveInputStream tar =
                 new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(tarFile.getAsFile().get())))) {
            final Path destinationPath = extractPath.get().getAsFile().toPath();
            TarArchiveEntry entry = tar.getNextTarEntry();
            while (entry != null) {
                final Path relativePath = transform.apply(entry.getName());
                if (relativePath == null) {
                    entry = tar.getNextTarEntry();
                    continue;
                }

                final Path destination = destinationPath.resolve(relativePath);
                final Path parent = destination.getParent();
                if (Files.exists(parent) == false) {
                    Files.createDirectories(parent);
                }
                if (entry.isDirectory()) {
                    Files.createDirectory(destination);
                } else if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(destination, Paths.get(entry.getLinkName()));
                } else {
                    // copy the file from the archive using a small buffer to avoid heaping
                    Files.createFile(destination);
                    try (FileOutputStream fos = new FileOutputStream(destination.toFile())) {
                        tar.transferTo(fos);
                    }
                }
                if (entry.isSymbolicLink() == false) {
                    // check if the underlying file system supports POSIX permissions
                    final PosixFileAttributeView view = Files.getFileAttributeView(destination, PosixFileAttributeView.class);
                    if (view != null) {
                        final Set<PosixFilePermission> permissions = PosixFilePermissions.fromString(
                            permissions((entry.getMode() >> 6) & 07) +
                                permissions((entry.getMode() >> 3) & 07) +
                                permissions((entry.getMode() >> 0) & 07));
                        Files.setPosixFilePermissions(destination, permissions);
                    }
                }
                entry = tar.getNextTarEntry();
            }
        } catch (final IOException e) {
            throw new GradleException("unable to extract tar [" + tarFile.getAsFile().get().toPath() + "]", e);
        }
    }

    private String permissions(final int permissions) {
        if (permissions < 0 || permissions > 7) {
            throw new IllegalArgumentException("permissions [" + permissions + "] out of range");
        }
        final StringBuilder sb = new StringBuilder(3);
        if ((permissions & 4) == 4) {
            sb.append('r');
        } else {
            sb.append('-');
        }
        if ((permissions & 2) == 2) {
            sb.append('w');
        } else {
            sb.append('-');
        }
        if ((permissions & 1) == 1) {
            sb.append('x');
        } else {
            sb.append('-');
        }
        return sb.toString();
    }

}
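As a worked illustration (not part of the commit; the demo class is hypothetical), this is how the task's private permissions(int) helper maps the three octal digits of a tar entry mode onto a string accepted by PosixFilePermissions.fromString; a typical executable mode of 0755 becomes "rwxr-xr-x".

import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

public class TarModePermissionsDemo {
    // same digit-to-rwx mapping as the permissions(int) helper above
    private static String permissions(final int permissions) {
        StringBuilder sb = new StringBuilder(3);
        sb.append((permissions & 4) == 4 ? 'r' : '-');
        sb.append((permissions & 2) == 2 ? 'w' : '-');
        sb.append((permissions & 1) == 1 ? 'x' : '-');
        return sb.toString();
    }

    public static void main(String[] args) {
        final int mode = 0755; // owner rwx, group r-x, other r-x
        final Set<PosixFilePermission> perms = PosixFilePermissions.fromString(
            permissions((mode >> 6) & 07) + permissions((mode >> 3) & 07) + permissions(mode & 07));
        System.out.println(PosixFilePermissions.toString(perms)); // rwxr-xr-x
    }
}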
@@ -0,0 +1,32 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle.tar;

import org.gradle.api.Plugin;
import org.gradle.api.Project;

public class SymoblicLinkPreservingTarPlugin implements Plugin<Project> {

    @Override
    public void apply(final Project target) {

    }

}
@@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.tar.SymoblicLinkPreservingTarPlugin
@@ -23,6 +23,10 @@ public class VersionProperties {
        return bundledJdk;
    }

    public static String getBundledJdkVendor() {
        return bundledJdkVendor;
    }

    public static Map<String, String> getVersions() {
        return versions;
    }
@@ -30,12 +34,14 @@ public class VersionProperties {
    private static final String elasticsearch;
    private static final String lucene;
    private static final String bundledJdk;
    private static final String bundledJdkVendor;
    private static final Map<String, String> versions = new HashMap<String, String>();

    static {
        Properties props = getVersionProperties();
        elasticsearch = props.getProperty("elasticsearch");
        lucene = props.getProperty("lucene");
        bundledJdkVendor = props.getProperty("bundled_jdk_vendor");
        bundledJdk = props.getProperty("bundled_jdk");

        for (String property : props.stringPropertyNames()) {
@@ -0,0 +1,59 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle;

import java.io.IOException;
import java.io.InputStream;

public class AdoptOpenJdkDownloadPluginIT extends JdkDownloadPluginIT {

    @Override
    public String oldJdkVersion() {
        return "1+99";
    }

    @Override
    public String jdkVersion() {
        return "12.0.2+10";
    }

    @Override
    public String jdkVendor() {
        return "adoptopenjdk";
    }

    @Override
    protected String urlPath(final boolean isOld, final String platform, final String extension) {
        final String module = platform.equals("osx") ? "mac" : platform;
        if (isOld) {
            return "/adoptopenjdk/OpenJDK1U-jdk_x64_" + module + "_hotspot_1_99." + extension;
        } else {
            return "/adoptopenjdk/OpenJDK12U-jdk_x64_" + module + "_hotspot_12.0.2_10." + extension;
        }
    }

    @Override
    protected byte[] filebytes(final String platform, final String extension) throws IOException {
        try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream("fake_adoptopenjdk_" + platform + "." + extension)) {
            return stream.readAllBytes();
        }
    }

}
@@ -25,7 +25,6 @@ import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -39,74 +38,79 @@ import static com.github.tomakehurst.wiremock.client.WireMock.head;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static org.hamcrest.CoreMatchers.equalTo;

public class JdkDownloadPluginIT extends GradleIntegrationTestCase {
public abstract class JdkDownloadPluginIT extends GradleIntegrationTestCase {

    private static final String OLD_JDK_VERSION = "1+99";
    private static final String JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde";
    private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)");
    private static final Pattern NUM_CONFIGS_LOGLINE = Pattern.compile("NUM CONFIGS: (.*)");

    public void testLinuxExtraction() throws IOException {
        assertExtraction("getLinuxJdk", "linux", "bin/java", JDK_VERSION);
    protected abstract String oldJdkVersion();

    protected abstract String jdkVersion();

    protected abstract String jdkVendor();

    public final void testLinuxExtraction() throws IOException {
        assertExtraction("getLinuxJdk", "linux", "bin/java", jdkVendor(), jdkVersion());
    }

    public void testDarwinExtraction() throws IOException {
        assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", JDK_VERSION);
    public final void testDarwinExtraction() throws IOException {
        assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", jdkVendor(), jdkVersion());
    }

    public void testWindowsExtraction() throws IOException {
        assertExtraction("getWindowsJdk", "windows", "bin/java", JDK_VERSION);
    public final void testWindowsExtraction() throws IOException {
        assertExtraction("getWindowsJdk", "windows", "bin/java", jdkVendor(), jdkVersion());
    }

    public void testLinuxExtractionOldVersion() throws IOException {
        assertExtraction("getLinuxJdk", "linux", "bin/java", OLD_JDK_VERSION);
    public final void testLinuxExtractionOldVersion() throws IOException {
        assertExtraction("getLinuxJdk", "linux", "bin/java", jdkVendor(), oldJdkVersion());
    }

    public void testDarwinExtractionOldVersion() throws IOException {
        assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", OLD_JDK_VERSION);
    public final void testDarwinExtractionOldVersion() throws IOException {
        assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", jdkVendor(), oldJdkVersion());
    }

    public void testWindowsExtractionOldVersion() throws IOException {
        assertExtraction("getWindowsJdk", "windows", "bin/java", OLD_JDK_VERSION);
    public final void testWindowsExtractionOldVersion() throws IOException {
        assertExtraction("getWindowsJdk", "windows", "bin/java", jdkVendor(), oldJdkVersion());
    }

    public void testCrossProjectReuse() throws IOException {
    public final void testCrossProjectReuse() throws IOException {
        runBuild("numConfigurations", "linux", result -> {
            Matcher matcher = NUM_CONFIGS_LOGLINE.matcher(result.getOutput());
            assertTrue("could not find num configs in output: " + result.getOutput(), matcher.find());
            assertThat(Integer.parseInt(matcher.group(1)), equalTo(6)); // 3 import configs, 3 export configs
        }, JDK_VERSION);
        }, jdkVendor(), jdkVersion());
    }

    public void assertExtraction(String taskname, String platform, String javaBin, String version) throws IOException {
    private void assertExtraction(String taskname, String platform, String javaBin, String vendor, String version) throws IOException {
        runBuild(taskname, platform, result -> {
            Matcher matcher = JDK_HOME_LOGLINE.matcher(result.getOutput());
            assertTrue("could not find jdk home in output: " + result.getOutput(), matcher.find());
            String jdkHome = matcher.group(1);
            Path javaPath = Paths.get(jdkHome, javaBin);
            assertTrue(javaPath.toString(), Files.exists(javaPath));
        }, version);
        }, vendor, version);
    }

    private void runBuild(String taskname, String platform, Consumer<BuildResult> assertions, String version) throws IOException {
    protected abstract String urlPath(boolean isOld, String platform, String extension);

    protected abstract byte[] filebytes(String platform, String extension) throws IOException;

    private void runBuild(
        String taskname, String platform, Consumer<BuildResult> assertions, String vendor, String version) throws IOException {
        WireMockServer wireMock = new WireMockServer(0);
        try {
            String extension = platform.equals("windows") ? "zip" : "tar.gz";
            boolean isOld = version.equals(OLD_JDK_VERSION);
            String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + platform + "-x64_bin." + extension;
            final byte[] filebytes;
            try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream("fake_openjdk_" + platform + "." + extension)) {
                filebytes = stream.readAllBytes();
            }
            String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99";
            String urlPath = "/java/GA/" + versionPath + "/GPL/" + filename;
            wireMock.stubFor(head(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200)));
            wireMock.stubFor(get(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200).withBody(filebytes)));
            boolean isOld = version.equals(oldJdkVersion());

            wireMock.stubFor(head(urlEqualTo(urlPath(isOld, platform, extension))).willReturn(aResponse().withStatus(200)));
            wireMock.stubFor(get(urlEqualTo(urlPath(isOld, platform, extension)))
                .willReturn(aResponse().withStatus(200).withBody(filebytes(platform, extension))));
            wireMock.start();

            GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir("jdk-download"))
                .withArguments(taskname,
                    "-Dlocal.repo.path=" + getLocalTestRepoPath(),
                    "-Dtests.jdk_vendor=" + vendor,
                    "-Dtests.jdk_version=" + version,
                    "-Dtests.jdk_repo=" + wireMock.baseUrl(),
                    "-i")
@@ -35,32 +35,50 @@ public class JdkDownloadPluginTests extends GradleUnitTestCase {
        rootProject = ProjectBuilder.builder().build();
    }

    public void testMissingVendor() {
        assertJdkError(createProject(), "testjdk", null, "11.0.2+33", "linux", "vendor not specified for jdk [testjdk]");
    }

    public void testUnknownVendor() {
        assertJdkError(
            createProject(),
            "testjdk",
            "unknown",
            "11.0.2+33",
            "linux",
            "unknown vendor [unknown] for jdk [testjdk], must be one of [adoptopenjdk, openjdk]");
    }

    public void testMissingVersion() {
        assertJdkError(createProject(), "testjdk", null, "linux", "version not specified for jdk [testjdk]");
    }

    public void testMissingPlatform() {
        assertJdkError(createProject(), "testjdk", "11.0.2+33", null, "platform not specified for jdk [testjdk]");
    }

    public void testUnknownPlatform() {
        assertJdkError(createProject(), "testjdk", "11.0.2+33", "unknown",
            "unknown platform [unknown] for jdk [testjdk], must be one of [linux, windows, darwin]");
        assertJdkError(createProject(), "testjdk", "openjdk", null, "linux", "version not specified for jdk [testjdk]");
    }

    public void testBadVersionFormat() {
        assertJdkError(createProject(), "testjdk", "badversion", "linux", "malformed version [badversion] for jdk [testjdk]");
        assertJdkError(createProject(), "testjdk", "openjdk", "badversion", "linux", "malformed version [badversion] for jdk [testjdk]");
    }

    private void assertJdkError(Project project, String name, String version, String platform, String message) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createJdk(project, name, version, platform));
    public void testMissingPlatform() {
        assertJdkError(createProject(), "testjdk", "openjdk", "11.0.2+33", null, "platform not specified for jdk [testjdk]");
    }

    public void testUnknownPlatform() {
        assertJdkError(createProject(), "testjdk", "openjdk", "11.0.2+33", "unknown",
            "unknown platform [unknown] for jdk [testjdk], must be one of [darwin, linux, windows]");
    }

    private void assertJdkError(Project project, String name, String vendor, String version, String platform, String message) {
        IllegalArgumentException e =
            expectThrows(IllegalArgumentException.class, () -> createJdk(project, name, vendor, version, platform));
        assertThat(e.getMessage(), equalTo(message));
    }

    private void createJdk(Project project, String name, String version, String platform) {
    private void createJdk(Project project, String name, String vendor, String version, String platform) {
        @SuppressWarnings("unchecked")
        NamedDomainObjectContainer<Jdk> jdks = (NamedDomainObjectContainer<Jdk>) project.getExtensions().getByName("jdks");
        jdks.create(name, jdk -> {
            if (vendor != null) {
                jdk.setVendor(vendor);
            }
            if (version != null) {
                jdk.setVersion(version);
            }
@@ -0,0 +1,56 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle;

import java.io.IOException;
import java.io.InputStream;

public class OpenJdkDownloadPluginIT extends JdkDownloadPluginIT {

    @Override
    public String oldJdkVersion() {
        return "1+99";
    }

    @Override
    public String jdkVersion() {
        return "12.0.1+99@123456789123456789123456789abcde";
    }

    @Override
    protected String jdkVendor() {
        return "openjdk";
    }

    @Override
    protected String urlPath(final boolean isOld, final String platform, final String extension) {
        final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99";
        final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + platform + "-x64_bin." + extension;
        return "/java/GA/" + versionPath + "/GPL/" + filename;
    }

    @Override
    protected byte[] filebytes(final String platform, final String extension) throws IOException {
        try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream("fake_openjdk_" + platform + "." + extension)) {
            return stream.readAllBytes();
        }
    }

}
@ -0,0 +1,147 @@
|
|||
package org.elasticsearch.gradle.tar;
|
||||
|
||||
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
|
||||
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
|
||||
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
|
||||
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
|
||||
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
|
||||
import org.gradle.api.GradleException;
|
||||
import org.gradle.testkit.runner.GradleRunner;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
|
||||
public class SymbolicLinkPreservingTarIT extends GradleIntegrationTestCase {
|
||||
|
||||
@Rule
|
||||
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
|
||||
@Before
|
||||
public void before() throws IOException {
|
||||
final Path realFolder = temporaryFolder.getRoot().toPath().resolve("real-folder");
|
||||
Files.createDirectory(realFolder);
|
||||
Files.createFile(realFolder.resolve("file"));
|
||||
Files.createSymbolicLink(realFolder.resolve("link-to-file"), Paths.get("./file"));
|
||||
final Path linkInFolder = temporaryFolder.getRoot().toPath().resolve("link-in-folder");
|
||||
Files.createDirectory(linkInFolder);
|
||||
Files.createSymbolicLink(linkInFolder.resolve("link-to-file"), Paths.get("../real-folder/file"));
|
||||
final Path linkToRealFolder = temporaryFolder.getRoot().toPath().resolve("link-to-real-folder");
|
||||
Files.createSymbolicLink(linkToRealFolder, Paths.get("./real-folder"));
|
||||
}
|
||||
|
||||
public void testBZip2Tar() throws IOException {
|
||||
runBuild("buildBZip2Tar", true);
|
||||
assertTar(".bz2", BZip2CompressorInputStream::new, true);
|
||||
}
|
||||
|
||||
public void testBZip2TarDoNotPreserveFileTimestamps() throws IOException {
|
||||
runBuild("buildBZip2Tar", false);
|
||||
assertTar(".bz2", BZip2CompressorInputStream::new, false);
|
||||
}
|
||||
|
||||
public void testGZipTar() throws IOException {
|
||||
runBuild("buildGZipTar", true);
|
||||
assertTar(".gz", GzipCompressorInputStream::new, true);
|
||||
}
|
||||
|
||||
public void testGZipTarDoNotPreserveFileTimestamps() throws IOException {
|
||||
runBuild("buildGZipTar", false);
|
||||
assertTar(".gz", GzipCompressorInputStream::new, false);
|
||||
}
|
||||
|
||||
public void testTar() throws IOException {
|
||||
runBuild("buildTar", true);
|
||||
assertTar("", fis -> fis, true);
|
||||
}
|
||||
|
||||
public void testTarDoNotPreserveFileTimestamps() throws IOException {
|
||||
runBuild("buildTar", false);
|
||||
assertTar("", fis -> fis, false);
|
||||
}
|
||||
|
||||
interface FileInputStreamWrapper {
|
||||
InputStream apply(FileInputStream fis) throws IOException;
|
||||
}
|
||||
|
||||
private void assertTar(
|
||||
final String extension, final FileInputStreamWrapper wrapper, boolean preserveFileTimestamps) throws IOException {
|
||||
try (TarArchiveInputStream tar = new TarArchiveInputStream(wrapper.apply(new FileInputStream(getOutputFile(extension))))) {
|
||||
TarArchiveEntry entry = tar.getNextTarEntry();
|
||||
boolean realFolderEntry = false;
|
||||
boolean fileEntry = false;
|
||||
boolean linkToFileEntry = false;
|
||||
boolean linkInFolderEntry = false;
|
||||
boolean linkInFolderLinkToFileEntry = false;
|
||||
boolean linkToRealFolderEntry = false;
|
||||
while (entry != null) {
|
||||
if (entry.getName().equals("real-folder/")) {
|
||||
assertTrue(entry.isDirectory());
|
||||
realFolderEntry = true;
|
||||
} else if (entry.getName().equals("real-folder/file")) {
|
||||
assertTrue(entry.isFile());
|
||||
fileEntry = true;
|
||||
} else if (entry.getName().equals("real-folder/link-to-file")) {
|
||||
assertTrue(entry.isSymbolicLink());
|
||||
assertThat(entry.getLinkName(), equalTo("./file"));
|
||||
linkToFileEntry = true;
|
||||
} else if (entry.getName().equals("link-in-folder/")) {
|
||||
assertTrue(entry.isDirectory());
|
||||
linkInFolderEntry = true;
|
||||
} else if (entry.getName().equals("link-in-folder/link-to-file")) {
|
||||
assertTrue(entry.isSymbolicLink());
|
||||
assertThat(entry.getLinkName(), equalTo("../real-folder/file"));
|
||||
linkInFolderLinkToFileEntry = true;
|
||||
} else if (entry.getName().equals("link-to-real-folder")) {
|
||||
assertTrue(entry.isSymbolicLink());
|
||||
assertThat(entry.getLinkName(), equalTo("./real-folder"));
|
||||
linkToRealFolderEntry = true;
|
||||
} else {
|
||||
throw new GradleException("unexpected entry [" + entry.getName() + "]");
|
||||
}
|
||||
if (preserveFileTimestamps) {
|
||||
assertTrue(entry.getModTime().getTime() > 0);
|
||||
} else {
|
||||
assertThat(entry.getModTime().getTime(), equalTo(0L));
|
||||
}
|
||||
entry = tar.getNextTarEntry();
|
||||
}
|
||||
assertTrue(realFolderEntry);
|
||||
assertTrue(fileEntry);
|
||||
assertTrue(linkToFileEntry);
|
||||
assertTrue(linkInFolderEntry);
|
||||
assertTrue(linkInFolderLinkToFileEntry);
|
||||
assertTrue(linkToRealFolderEntry);
|
||||
}
|
||||
}
|
||||
|
||||
private void runBuild(final String task, final boolean preserveFileTimestamps) {
|
||||
final GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir())
|
||||
.withArguments(
|
||||
task,
|
||||
"-Dtests.symbolic_link_preserving_tar_source=" + temporaryFolder.getRoot().toString(),
|
||||
"-Dtests.symbolic_link_preserving_tar_preserve_file_timestamps=" + preserveFileTimestamps,
|
||||
"-i")
|
||||
.withPluginClasspath();
|
||||
|
||||
runner.build();
|
||||
}
|
||||
|
||||
private File getProjectDir() {
|
||||
return getProjectDir("symbolic-link-preserving-tar");
|
||||
}
|
||||
|
||||
private File getOutputFile(final String extension) {
|
||||
return getProjectDir().toPath().resolve("build/distributions/symbolic-link-preserving-tar.tar" + extension).toFile();
|
||||
}
|
||||
|
||||
}
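The entry names and link targets asserted above imply a fixed source layout that the test stages in its temporary folder before invoking the fixture build. That setup code is not part of this hunk; the following is only a minimal sketch, under the assumption that the tree is built with the java.nio.file API, with names taken from the assertions:

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

// 'source' stands for the temporary folder later passed as -Dtests.symbolic_link_preserving_tar_source.
Path source = temporaryFolder.getRoot().toPath()

Path realFolder = Files.createDirectories(source.resolve('real-folder'))
Files.createFile(realFolder.resolve('file'))                                                       // real-folder/file
Files.createSymbolicLink(realFolder.resolve('link-to-file'), Paths.get('./file'))                  // -> ./file
Path linkInFolder = Files.createDirectories(source.resolve('link-in-folder'))
Files.createSymbolicLink(linkInFolder.resolve('link-to-file'), Paths.get('../real-folder/file'))   // -> ../real-folder/file
Files.createSymbolicLink(source.resolve('link-to-real-folder'), Paths.get('./real-folder'))        // -> ./real-folder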
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -2,9 +2,11 @@
project.gradle.projectsEvaluated {
  // wire the jdk repo to wiremock
  String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo'))
  String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
  String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
  println rootProject.repositories.asMap.keySet()
  IvyArtifactRepository repository = (IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVersion}")
  IvyArtifactRepository repository =
      (IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVendor}_${fakeJdkVersion}")
  repository.setUrl(fakeJdkRepo)
}

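The fixture above only consumes tests.jdk_repo, tests.jdk_vendor and tests.jdk_version; the driving test has to supply them. A hypothetical sketch of how that could look, reusing the GradleRunner pattern from the tar test earlier (the project path, task name and WireMock URL are made up for illustration and are not part of this commit):

import org.gradle.testkit.runner.GradleRunner

File projectDir = new File('jdk-download-fixture')        // hypothetical path to the fixture project
GradleRunner.create()
    .withProjectDir(projectDir)
    .withArguments(
        'someJdkDownloadTask',                             // hypothetical task name
        '-Dtests.jdk_repo=http://localhost:8080',          // hypothetical WireMock base URL
        '-Dtests.jdk_vendor=adoptopenjdk',
        '-Dtests.jdk_version=12.0.2+10',
        '-i')
    .withPluginClasspath()
    .build()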
@@ -1,8 +1,10 @@
evaluationDependsOn ':subproj'

String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
jdks {
  linux_jdk {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "linux"
  }

@@ -3,17 +3,21 @@ plugins {
}

String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
jdks {
  linux {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "linux"
  }
  darwin {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "darwin"
  }
  windows {
    vendor = fakeJdkVendor
    version = fakeJdkVersion
    platform = "windows"
  }

@@ -0,0 +1,53 @@
import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar

plugins {
  id 'base'
  id 'distribution'
  id 'elasticsearch.symbolic-link-preserving-tar'
}

final String source = Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_source'))
boolean preserveFileTimestamps;
final String testPreserveFileTimestamps =
    Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_preserve_file_timestamps'))
switch (testPreserveFileTimestamps) {
  case "true":
    preserveFileTimestamps = true
    break
  case "false":
    preserveFileTimestamps = false
    break
  default:
    throw new IllegalArgumentException(
        "tests.symbolic_link_preserving_tar_preserve_file_timestamps must be [true] or [false] but was ["
            + testPreserveFileTimestamps + "]")
}

task buildBZip2Tar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
  tar.archiveExtension = 'tar.bz2'
  tar.compression = Compression.BZIP2
  tar.preserveFileTimestamps = preserveFileTimestamps
  from fileTree(source)
  doLast {
    println archiveFile.get().asFile.path
  }
}

task buildGZipTar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
  tar.archiveExtension = 'tar.gz'
  tar.compression = Compression.GZIP
  tar.preserveFileTimestamps = preserveFileTimestamps
  from fileTree(source)
  doLast {
    println archiveFile.get().asFile.path
  }
}

task buildTar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
  tar.archiveExtension = 'tar'
  tar.preserveFileTimestamps = preserveFileTimestamps
  from fileTree(source)
  doLast {
    println archiveFile.get().asFile.path
  }
}

@@ -1,7 +1,8 @@
elasticsearch = 7.5.0
lucene = 8.2.0

bundled_jdk = 12.0.2+10@e482c34c86bd4bf8b56c0b35558996b9
bundled_jdk_vendor = adoptopenjdk
bundled_jdk = 12.0.2+10

# optional dependencies
spatial4j = 0.7

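The vendor now lives next to the version in version.properties, and both values surface to build scripts through VersionProperties, which is how the jdkFiles closure further down picks them up. A minimal sketch, assuming only the accessors that closure already uses:

import org.elasticsearch.gradle.VersionProperties

// Values correspond to the properties above.
String vendor  = VersionProperties.bundledJdkVendor   // "adoptopenjdk"
String version = VersionProperties.bundledJdk         // "12.0.2+10"
println "bundled JDK: ${vendor} ${version}"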
@@ -24,6 +24,7 @@ import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar

import java.nio.file.Files
import java.nio.file.Path

@@ -141,49 +142,49 @@ Closure commonTarConfig = {
  fileMode 0644
}

task buildDarwinTar(type: Tar) {
task buildDarwinTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'darwin-x86_64'
  with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', false, true)
}

task buildOssDarwinTar(type: Tar) {
task buildOssDarwinTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'darwin-x86_64'
  with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', true, true)
}

task buildNoJdkDarwinTar(type: Tar) {
task buildNoJdkDarwinTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-darwin-x86_64'
  with archiveFiles(modulesFiles(false, 'darwin'), 'tar', 'darwin', false, false)
}

task buildOssNoJdkDarwinTar(type: Tar) {
task buildOssNoJdkDarwinTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-darwin-x86_64'
  with archiveFiles(modulesFiles(true, 'darwin'), 'tar', 'darwin', true, false)
}

task buildLinuxTar(type: Tar) {
task buildLinuxTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'linux-x86_64'
  with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', false, true)
}

task buildOssLinuxTar(type: Tar) {
task buildOssLinuxTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'linux-x86_64'
  with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', true, true)
}

task buildNoJdkLinuxTar(type: Tar) {
task buildNoJdkLinuxTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-linux-x86_64'
  with archiveFiles(modulesFiles(false, 'linux'), 'tar', 'linux', false, false)
}

task buildOssNoJdkLinuxTar(type: Tar) {
task buildOssNoJdkLinuxTar(type: SymbolicLinkPreservingTar) {
  configure(commonTarConfig)
  archiveClassifier = 'no-jdk-linux-x86_64'
  with archiveFiles(modulesFiles(true, 'linux'), 'tar', 'linux', true, false)

@@ -375,4 +376,3 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
  }
}
}

@@ -26,7 +26,8 @@ import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.test.RunTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Path

/*****************************************************************************
 *                  Third party dependencies report                          *
 *****************************************************************************/

@@ -384,11 +385,12 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
  }
}

jdkFiles = { project, platform ->
jdkFiles = { Project project, String platform ->
  project.jdks {
    "bundled_${platform}" {
      it.platform = platform
      it.version = VersionProperties.bundledJdk
      it.vendor = VersionProperties.bundledJdkVendor
    }
  }
  return copySpec {
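The body of the returned copySpec is cut off above. Purely as a hypothetical illustration of the pattern (not the commit's actual code), a JDK declared in the jdks container could then be wired into the distribution layout inside that same jdkFiles closure:

// Hypothetical sketch only; the real copySpec body is not shown in this hunk.
return copySpec {
  from project.jdks."bundled_${platform}"   // the vendor- and platform-specific JDK declared just above
  into 'jdk'                                // hypothetical destination directory inside the distribution
}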