Restore Java 8 compatibility for build tools. (#2300) (#2321)

* Restore Java 8 compatibility for build tools.

Signed-off-by: dblock <dblock@dblock.org>

* Make source code compatible with Java 8.

Signed-off-by: dblock <dblock@dblock.org>
This commit is contained in:
Daniel Doubrovkine (dB.) 2022-03-03 15:34:53 -05:00 committed by GitHub
parent cb57b9202b
commit ae14259a2c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 165 additions and 102 deletions

View File

@ -158,8 +158,8 @@ if (project != rootProject) {
apply plugin: 'opensearch.publish'
allprojects {
targetCompatibility = 11
sourceCompatibility = 11
targetCompatibility = 8
sourceCompatibility = 8
}
// groovydoc succeeds, but has some weird internal exception...

View File

@ -55,7 +55,7 @@ public class ResolveAllDependencies extends DefaultTask {
return false;
}
if (configuration instanceof org.gradle.internal.deprecation.DeprecatableConfiguration) {
var deprecatableConfiguration = (DeprecatableConfiguration) configuration;
DeprecatableConfiguration deprecatableConfiguration = (DeprecatableConfiguration) configuration;
if (deprecatableConfiguration.canSafelyBeResolved() == false) {
return false;
}

View File

@ -64,7 +64,6 @@ import java.util.regex.Pattern;
/**
* A wrapper around gradle's Exec task to capture output and log on error.
*/
@SuppressWarnings("unchecked")
public class LoggedExec extends Exec implements FileSystemOperationsAware {
private static final Logger LOGGER = Logging.getLogger(LoggedExec.class);

View File

@ -63,8 +63,8 @@ import org.gradle.external.javadoc.CoreJavadocOptions;
import org.gradle.language.base.plugins.LifecycleBasePlugin;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import static org.opensearch.gradle.util.Util.toStringable;
@ -212,10 +212,12 @@ public class OpenSearchJavaPlugin implements Plugin<Project> {
public void execute(Task task) {
// this doFirst is added before the info plugin, therefore it will run
// after the doFirst added by the info plugin, and we can override attributes
jarTask.getManifest()
.attributes(
Map.of("Build-Date", BuildParams.getBuildDate(), "Build-Java-Version", BuildParams.getGradleJavaVersion())
);
jarTask.getManifest().attributes(new HashMap<String, Object>() {
{
put("Build-Date", BuildParams.getBuildDate());
put("Build-Java-Version", BuildParams.getGradleJavaVersion());
}
});
}
});
});

View File

@ -49,6 +49,7 @@ import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.testing.Test;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import static org.opensearch.gradle.util.FileUtils.mkdirs;
@ -95,7 +96,7 @@ public class OpenSearchTestBasePlugin implements Plugin<Project> {
// We specifically use an anonymous inner class here because lambda task actions break Gradle cacheability
// See: https://docs.gradle.org/current/userguide/more_about_tasks.html#sec:how_does_it_work
test.doFirst(new Action<>() {
test.doFirst(new Action<Task>() {
@Override
public void execute(Task t) {
mkdirs(testOutputDir);
@ -137,20 +138,16 @@ public class OpenSearchTestBasePlugin implements Plugin<Project> {
test.jvmArgs("-ea", "-esa");
}
Map<String, String> sysprops = Map.of(
"java.awt.headless",
"true",
"tests.gradle",
"true",
"tests.artifact",
project.getName(),
"tests.task",
test.getPath(),
"tests.security.manager",
"true",
"jna.nosys",
"true"
);
Map<String, String> sysprops = new HashMap<String, String>() {
{
put("java.awt.headless", "true");
put("tests.gradle", "true");
put("tests.artifact", project.getName());
put("tests.task", test.getPath());
put("tests.security.manager", "true");
put("jna.nosys", "true");
}
};
test.systemProperties(sysprops);
// ignore changing test seed when build is passed -Dignore.tests.seed for cacheability experimentation

View File

@ -36,6 +36,7 @@ import org.opensearch.gradle.info.GlobalBuildInfoPlugin;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import java.lang.management.ManagementFactory;
import java.nio.file.Path;
/**
@ -51,14 +52,31 @@ public class ReaperPlugin implements Plugin<Project> {
project.getPlugins().apply(GlobalBuildInfoPlugin.class);
Path inputDir = project.getRootDir()
.toPath()
.resolve(".gradle")
.resolve("reaper")
.resolve("build-" + ProcessHandle.current().pid());
Path inputDir = project.getRootDir().toPath().resolve(".gradle").resolve("reaper").resolve("build-" + getProcessId("xx"));
ReaperService service = project.getExtensions()
.create("reaper", ReaperService.class, project, project.getBuildDir().toPath(), inputDir);
project.getGradle().buildFinished(result -> service.shutdown());
}
private static String getProcessId(final String fallback) {
// Java 8 replacement for ProcessHandle.current().pid() (which requires Java 9+):
// extract the PID from the RuntimeMXBean name. On SUN/Oracle JVMs this name is
// typically '<pid>@<hostname>', but that format is not guaranteed by the spec,
// so the caller must supply a fallback value to return when parsing fails.
final String jvmName = ManagementFactory.getRuntimeMXBean().getName();
final int index = jvmName.indexOf('@');
if (index < 1) {
// part before '@' empty (index = 0) / '@' not found (index = -1)
return fallback;
}
try {
// Round-trip through Long to validate and normalize the numeric prefix.
return Long.toString(Long.parseLong(jvmName.substring(0, index)));
} catch (NumberFormatException e) {
// prefix before '@' was not numeric — deliberately ignored, fall back below
}
return fallback;
}
}

View File

@ -179,7 +179,11 @@ public class ReaperService {
InputStream jarInput = this.getClass().getResourceAsStream("/META-INF/reaper.jar");
) {
logger.info("Copying reaper.jar...");
jarInput.transferTo(out);
byte[] buffer = new byte[4096];
int len;
while ((len = jarInput.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}

View File

@ -49,6 +49,7 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@ -280,7 +281,7 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
*/
private Optional<String> getDockerPath() {
// Check if the Docker binary exists
return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
return Arrays.asList(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
}
/**
@ -291,7 +292,7 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
*/
private Optional<String> getDockerComposePath() {
// Check if the Docker binary exists
return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
return Arrays.asList(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
}
private void throwDockerRequiredException(final String message) {

View File

@ -109,7 +109,7 @@ public class InternalBwcGitPlugin implements Plugin<Project> {
});
TaskProvider<LoggedExec> fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> {
var gitFetchLatest = project.getProviders()
Provider<Object> gitFetchLatest = project.getProviders()
.systemProperty("tests.bwc.git_fetch_latest")
.forUseAtConfigurationTime()
.orElse("true")
@ -122,7 +122,7 @@ public class InternalBwcGitPlugin implements Plugin<Project> {
}
throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]");
});
fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get());
fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get() != null);
fetchLatest.dependsOn(addRemoteTaskProvider);
fetchLatest.setWorkingDir(gitExtension.getCheckoutDir().get());
fetchLatest.setCommandLine(asList("git", "fetch", "--all"));

View File

@ -39,6 +39,7 @@ import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.plugins.BasePlugin;
import org.gradle.api.logging.Logger;
@ -46,6 +47,7 @@ import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.AbstractCopyTask;
import org.gradle.api.tasks.Sync;
import org.gradle.api.tasks.TaskContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.bundling.AbstractArchiveTask;
import org.gradle.api.tasks.bundling.Compression;
import org.gradle.api.tasks.bundling.Zip;
@ -105,14 +107,18 @@ public class InternalDistributionArchiveSetupPlugin implements Plugin<Project> {
private void registerAndConfigureDistributionArchivesExtension(Project project) {
container = project.container(DistributionArchive.class, name -> {
var subProjectDir = archiveToSubprojectName(name);
var copyDistributionTaskName = "build" + capitalize(name.substring(0, name.length() - 3));
String subProjectDir = archiveToSubprojectName(name);
String copyDistributionTaskName = "build" + capitalize(name.substring(0, name.length() - 3));
TaskContainer tasks = project.getTasks();
var explodedDist = tasks.register(copyDistributionTaskName, Sync.class, sync -> sync.into(subProjectDir + "/build/install/"));
TaskProvider<Sync> explodedDist = tasks.register(
copyDistributionTaskName,
Sync.class,
sync -> sync.into(subProjectDir + "/build/install/")
);
explodedDist.configure(configure(name));
var archiveTaskName = "build" + capitalize(name);
String archiveTaskName = "build" + capitalize(name);
var archiveTask = name.endsWith("Tar")
TaskProvider<? extends AbstractArchiveTask> archiveTask = name.endsWith("Tar")
? tasks.register(archiveTaskName, SymbolicLinkPreservingTar.class)
: tasks.register(archiveTaskName, Zip.class);
archiveTask.configure(configure(name));
@ -122,11 +128,11 @@ public class InternalDistributionArchiveSetupPlugin implements Plugin<Project> {
// Each defined distribution archive is linked to a subproject.
// A distribution archive definition not matching a sub project will result in build failure.
container.whenObjectAdded(distributionArchive -> {
var subProjectName = archiveToSubprojectName(distributionArchive.getName());
String subProjectName = archiveToSubprojectName(distributionArchive.getName());
project.project(subProjectName, sub -> {
sub.getPlugins().apply(BasePlugin.class);
sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask());
var extractedConfiguration = sub.getConfigurations().create("extracted");
Configuration extractedConfiguration = sub.getConfigurations().create("extracted");
extractedConfiguration.setCanBeResolved(false);
extractedConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE);
sub.getArtifacts().add(EXTRACTED_CONFIGURATION_NAME, distributionArchive.getExpandedDistTask());

View File

@ -48,8 +48,9 @@ import org.gradle.api.tasks.TaskProvider;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin {
@Override
@ -90,14 +91,14 @@ public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin {
// TODO: forbidden apis does not yet support java 15, rethink using runtime version
t.setTargetCompatibility(JavaVersion.VERSION_14.getMajorVersion());
}
t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"));
t.setBundledSignatures(new HashSet<>(Arrays.asList("jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out")));
t.setSignaturesFiles(
project.files(
resourcesDir.resolve("forbidden/jdk-signatures.txt"),
resourcesDir.resolve("forbidden/opensearch-all-signatures.txt")
)
);
t.setSuppressAnnotations(Set.of("**.SuppressForbidden"));
t.setSuppressAnnotations(new HashSet<>(Arrays.asList("**.SuppressForbidden")));
if (t.getName().endsWith("Test")) {
t.setSignaturesFiles(
t.getSignaturesFiles()

View File

@ -51,6 +51,7 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
@ -126,14 +127,17 @@ public class ValidateJsonNoKeywordsTask extends DefaultTask {
final JsonNode jsonNode = mapper.readTree(file);
if (jsonNode.isObject() == false) {
errors.put(file, Set.of("Expected an object, but found: " + jsonNode.getNodeType()));
errors.put(file, new HashSet<>(Arrays.asList("Expected an object, but found: " + jsonNode.getNodeType())));
return;
}
final ObjectNode rootNode = (ObjectNode) jsonNode;
if (rootNode.size() != 1) {
errors.put(file, Set.of("Expected an object with exactly 1 key, but found " + rootNode.size() + " keys"));
errors.put(
file,
new HashSet<>(Arrays.asList("Expected an object with exactly 1 key, but found " + rootNode.size() + " keys"))
);
return;
}
@ -148,7 +152,7 @@ public class ValidateJsonNoKeywordsTask extends DefaultTask {
}
}
} catch (IOException e) {
errors.put(file, Set.of("Failed to load file: " + e.getMessage()));
errors.put(file, new HashSet<>(Arrays.asList("Failed to load file: " + e.getMessage())));
}
});

View File

@ -70,6 +70,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Supplier;
import java.util.stream.Stream;
@ -204,7 +205,7 @@ public class DistroTestPlugin implements Plugin<Project> {
vmDependencies
);
} else {
for (var entry : linuxTestTasks.entrySet()) {
for (Entry<OpenSearchDistribution.Type, List<TaskProvider<Test>>> entry : linuxTestTasks.entrySet()) {
OpenSearchDistribution.Type type = entry.getKey();
TaskProvider<?> vmLifecycleTask = vmLifecyleTasks.get(type);
configureVMWrapperTasks(vmProject, entry.getValue(), depsTasks, wrapperTask -> {
@ -227,7 +228,7 @@ public class DistroTestPlugin implements Plugin<Project> {
}, vmDependencies);
}
for (var entry : upgradeTestTasks.entrySet()) {
for (Entry<String, List<TaskProvider<Test>>> entry : upgradeTestTasks.entrySet()) {
String version = entry.getKey();
TaskProvider<?> vmVersionTask = vmVersionTasks.get(version);
configureVMWrapperTasks(
@ -321,7 +322,12 @@ public class DistroTestPlugin implements Plugin<Project> {
private static Configuration configureExamplePlugin(Project project) {
Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION);
DependencyHandler deps = project.getDependencies();
Map<String, String> examplePluginProject = Map.of("path", ":example-plugins:custom-settings", "configuration", "zip");
Map<String, String> examplePluginProject = new HashMap<String, String>() {
{
put("path", ":example-plugins:custom-settings");
put("configuration", "zip");
}
};
deps.add(EXAMPLE_PLUGIN_CONFIGURATION, deps.project(examplePluginProject));
return examplePlugin;
}

View File

@ -164,7 +164,7 @@ public class CopyRestApiTask extends DefaultTask {
getFileSystemOperations().copy(c -> {
c.from(getArchiveOperations().zipTree(coreConfig.getSingleFile()));
// this ends up as the same dir as outputDir
c.into(Objects.requireNonNull(getSourceSet().orElseThrow().getOutput().getResourcesDir()));
c.into(Objects.requireNonNull(getSourceSet().get().getOutput().getResourcesDir()));
if (includeCore.get().isEmpty()) {
c.include(REST_API_PREFIX + "/**");
} else {

View File

@ -155,7 +155,7 @@ public class CopyRestTestsTask extends DefaultTask {
getFileSystemOperations().copy(c -> {
c.from(getArchiveOperations().zipTree(coreConfig.getSingleFile()));
// this ends up as the same dir as outputDir
c.into(Objects.requireNonNull(getSourceSet().orElseThrow().getOutput().getResourcesDir()));
c.into(Objects.requireNonNull(getSourceSet().get().getOutput().getResourcesDir()));
c.include(
includeCore.get().stream().map(prefix -> REST_TEST_PREFIX + "/" + prefix + "*/**").collect(Collectors.toList())
);

View File

@ -41,7 +41,7 @@ import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
import java.util.Map;
import java.util.HashMap;
/**
* <p>
@ -88,8 +88,12 @@ public class RestResourcesPlugin implements Plugin<Project> {
task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME;
if (BuildParams.isInternal()) {
// core
Dependency restTestdependency = project.getDependencies()
.project(Map.of("path", ":rest-api-spec", "configuration", "restTests"));
Dependency restTestdependency = project.getDependencies().project(new HashMap<String, String>() {
{
put("path", ":rest-api-spec");
put("configuration", "restTests");
}
});
project.getDependencies().add(task.coreConfig.getName(), restTestdependency);
} else {
Dependency dependency = project.getDependencies()
@ -109,8 +113,12 @@ public class RestResourcesPlugin implements Plugin<Project> {
task.coreConfig = specConfig;
task.sourceSetName = SourceSet.TEST_SOURCE_SET_NAME;
if (BuildParams.isInternal()) {
Dependency restSpecDependency = project.getDependencies()
.project(Map.of("path", ":rest-api-spec", "configuration", "restSpecs"));
Dependency restSpecDependency = project.getDependencies().project(new HashMap<String, String>() {
{
put("path", ":rest-api-spec");
put("configuration", "restSpecs");
}
});
project.getDependencies().add(task.coreConfig.getName(), restSpecDependency);
} else {
Dependency dependency = project.getDependencies()

View File

@ -93,9 +93,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Function;
@ -385,8 +383,12 @@ public class OpenSearchNode implements TestClusterConfiguration {
private Provider<RegularFile> maybeCreatePluginOrModuleDependency(String path) {
Configuration configuration = pluginAndModuleConfigurations.computeIfAbsent(
path,
key -> project.getConfigurations()
.detachedConfiguration(project.getDependencies().project(Map.of("path", path, "configuration", "zip")))
key -> project.getConfigurations().detachedConfiguration(project.getDependencies().project(new HashMap<String, String>() {
{
put("path", path);
put("configuration", "zip");
}
}))
);
Provider<File> fileProvider = configuration.getElements()
.map(
@ -679,10 +681,6 @@ public class OpenSearchNode implements TestClusterConfiguration {
setting("node.attr.upgraded", "true");
}
// Returns true iff the named setting's string form is (case-insensitively) "true";
// a missing setting defaults to "false". Non-boolean values therefore read as false.
private boolean isSettingTrue(String name) {
return Boolean.valueOf(settings.getOrDefault(name, "false").toString());
}
private void copyExtraConfigFiles() {
if (extraConfigFiles.isEmpty() == false) {
logToProcessStdout("Setting up " + extraConfigFiles.size() + " additional config files");
@ -910,7 +908,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
} catch (IOException e) {
throw new TestClustersException("Failed to start " + currentConfig.command + " process for " + this, e);
}
reaper.registerPid(toString(), opensearchProcess.pid());
// reaper.registerPid(toString(), opensearchProcess.pid());
}
@Internal
@ -977,7 +975,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
LOGGER.info("Stopping `{}`, tailLogs: {}", this, tailLogs);
requireNonNull(opensearchProcess, "Can't stop `" + this + "` as it was not started or already stopped.");
// Test clusters are not reused, don't spend time on a graceful shutdown
stopHandle(opensearchProcess.toHandle(), true);
stopHandle(opensearchProcess, true);
reaper.unregister(toString());
if (tailLogs) {
logFileContents("Standard output of node", currentConfig.stdoutFile);
@ -1002,9 +1000,9 @@ public class OpenSearchNode implements TestClusterConfiguration {
this.nameCustomization = nameCustomizer;
}
private void stopHandle(ProcessHandle processHandle, boolean forcibly) {
private void stopHandle(Process process, boolean forcibly) {
// No-op if the process has already exited by itself.
if (processHandle.isAlive() == false) {
if (process.isAlive() == false) {
LOGGER.info("Process was not running when we tried to terminate it.");
return;
}
@ -1013,19 +1011,19 @@ public class OpenSearchNode implements TestClusterConfiguration {
// they'll be recorded as having failed and won't restart when the cluster restarts.
// ES could actually be a child when there's some wrapper process like on Windows,
// and in that case the ML processes will be grandchildren of the wrapper.
List<ProcessHandle> children = processHandle.children().collect(Collectors.toList());
// List<Process> children = process.children().collect(Collectors.toList());
try {
logProcessInfo(
"Terminating " + currentConfig.command + " process" + (forcibly ? " forcibly " : "gracefully") + ":",
processHandle.info()
);
// logProcessInfo(
// "Terminating " + currentConfig.command + " process" + (forcibly ? " forcibly " : "gracefully") + ":",
// process.info()
// );
if (forcibly) {
processHandle.destroyForcibly();
process.destroyForcibly();
} else {
processHandle.destroy();
waitForProcessToExit(processHandle);
if (processHandle.isAlive() == false) {
process.destroy();
waitForProcessToExit(process);
if (process.isAlive() == false) {
return;
}
LOGGER.info(
@ -1033,19 +1031,25 @@ public class OpenSearchNode implements TestClusterConfiguration {
OPENSEARCH_DESTROY_TIMEOUT,
OPENSEARCH_DESTROY_TIMEOUT_UNIT
);
processHandle.destroyForcibly();
process.destroyForcibly();
}
waitForProcessToExit(processHandle);
if (processHandle.isAlive()) {
waitForProcessToExit(process);
if (process.isAlive()) {
throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this);
}
} finally {
children.forEach(each -> stopHandle(each, forcibly));
// children.forEach(each -> stopHandle(each, forcibly));
}
// waitForProcessToExit(process);
// if (process.isAlive()) {
// throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this);
// }
}
private void logProcessInfo(String prefix, ProcessHandle.Info info) {
/*
private void logProcessInfo(String prefix, Process info) {
LOGGER.info(
prefix + " commandLine:`{}` command:`{}` args:`{}`",
info.commandLine().orElse("-"),
@ -1053,6 +1057,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
Arrays.stream(info.arguments().orElse(new String[] {})).map(each -> "'" + each + "'").collect(Collectors.joining(" "))
);
}
*/
private void logFileContents(String description, Path from) {
final Map<String, Integer> errorsAndWarnings = new LinkedHashMap<>();
@ -1121,16 +1126,14 @@ public class OpenSearchNode implements TestClusterConfiguration {
return line;
}
private void waitForProcessToExit(ProcessHandle processHandle) {
private void waitForProcessToExit(Process process) {
try {
processHandle.onExit().get(OPENSEARCH_DESTROY_TIMEOUT, OPENSEARCH_DESTROY_TIMEOUT_UNIT);
process.waitFor(OPENSEARCH_DESTROY_TIMEOUT, OPENSEARCH_DESTROY_TIMEOUT_UNIT);
} catch (InterruptedException e) {
LOGGER.info("Interrupted while waiting for {} process", currentConfig.command, e);
Thread.currentThread().interrupt();
} catch (ExecutionException e) {
} catch (NullPointerException e) {
LOGGER.info("Failure while waiting for process to exist", e);
} catch (TimeoutException e) {
LOGGER.info("Timed out waiting for process to exit", e);
}
}

View File

@ -172,11 +172,9 @@ public interface TestClusterConfiguration {
} else {
String extraCause = "";
Throwable cause = lastException;
int ident = 2;
while (cause != null) {
if (cause.getMessage() != null && cause.getMessage().isEmpty() == false) {
extraCause += "\n" + " " + cause.getMessage();
ident += 2;
}
cause = cause.getCause();
}

View File

@ -64,9 +64,9 @@ import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collections;
import java.util.function.BiConsumer;
import java.util.List;
import java.util.Optional;
public class TestFixturesPlugin implements Plugin<Project> {
@ -162,7 +162,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
final Integer timeout = ext.has("dockerComposeHttpTimeout") ? (Integer) ext.get("dockerComposeHttpTimeout") : 120;
composeExtension.getEnvironment().put("COMPOSE_HTTP_TIMEOUT", timeout);
Optional<String> dockerCompose = List.of(DOCKER_COMPOSE_BINARIES)
Optional<String> dockerCompose = Arrays.asList(DOCKER_COMPOSE_BINARIES)
.stream()
.filter(path -> project.file(path).exists())
.findFirst();

View File

@ -84,7 +84,11 @@ public abstract class SymbolicLinkPreservingUntarTransform implements UnpackTran
// copy the file from the archive using a small buffer to avoid heaping
Files.createFile(destination);
try (FileOutputStream fos = new FileOutputStream(destination.toFile())) {
tar.transferTo(fos);
byte[] buffer = new byte[4096];
int len;
while ((len = tar.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
}
}
if (entry.isSymbolicLink() == false) {
@ -94,6 +98,5 @@ public abstract class SymbolicLinkPreservingUntarTransform implements UnpackTran
entry = tar.getNextTarEntry();
}
}
}
}

View File

@ -55,6 +55,7 @@ import org.gradle.plugins.ide.idea.model.IdeaModel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -168,7 +169,11 @@ public abstract class GradleUtils {
project.getPluginManager().withPlugin("idea", p -> {
IdeaModel idea = project.getExtensions().getByType(IdeaModel.class);
idea.getModule().setTestSourceDirs(testSourceSet.getJava().getSrcDirs());
idea.getModule().getScopes().put(testSourceSet.getName(), Map.of("plus", List.of(runtimeClasspathConfiguration)));
idea.getModule().getScopes().put(testSourceSet.getName(), new HashMap<String, Collection<Configuration>>() {
{
put("plus", Arrays.asList(runtimeClasspathConfiguration));
}
});
});
project.getPluginManager().withPlugin("eclipse", p -> {
EclipseModel eclipse = project.getExtensions().getByType(EclipseModel.class);

View File

@ -42,7 +42,6 @@ import org.gradle.api.execution.TaskExecutionListener;
import org.gradle.api.tasks.TaskState;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.function.Consumer;
import java.util.regex.Matcher;
@ -102,7 +101,7 @@ public class VagrantBasePlugin implements Plugin<Project> {
spec.setCommandLine(tool, "--version");
spec.setStandardOutput(pipe);
});
String output = pipe.toString(StandardCharsets.UTF_8).trim();
String output = pipe.toString().trim();
Matcher matcher = versionRegex.matcher(output);
if (matcher.find() == false) {
throw new IllegalStateException(

View File

@ -33,6 +33,7 @@ package org.opensearch.gradle.docker;
import org.opensearch.gradle.test.GradleIntegrationTestCase;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -44,7 +45,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
public class DockerSupportServiceTests extends GradleIntegrationTestCase {
public void testParseOsReleaseOnOracle() {
final List<String> lines = List.of(
final List<String> lines = Arrays.asList(
"NAME=\"Oracle Linux Server\"",
"VERSION=\"6.10\"",
"ID=\"ol\"",
@ -85,11 +86,15 @@ public class DockerSupportServiceTests extends GradleIntegrationTestCase {
* Trailing whitespace should be removed
*/
public void testRemoveTrailingWhitespace() {
final List<String> lines = List.of("NAME=\"Oracle Linux Server\" ");
final List<String> lines = Arrays.asList("NAME=\"Oracle Linux Server\" ");
final Map<String, String> results = parseOsRelease(lines);
final Map<String, String> expected = Map.of("NAME", "oracle linux server");
final Map<String, String> expected = new HashMap<String, String>() {
{
put("NAME", "oracle linux server");
}
};
assertThat(expected, equalTo(results));
}
@ -98,11 +103,15 @@ public class DockerSupportServiceTests extends GradleIntegrationTestCase {
* Comments should be removed
*/
public void testRemoveComments() {
final List<String> lines = List.of("# A comment", "NAME=\"Oracle Linux Server\"");
final List<String> lines = Arrays.asList("# A comment", "NAME=\"Oracle Linux Server\"");
final Map<String, String> results = parseOsRelease(lines);
final Map<String, String> expected = Map.of("NAME", "oracle linux server");
final Map<String, String> expected = new HashMap<String, String>() {
{
put("NAME", "oracle linux server");
}
};
assertThat(expected, equalTo(results));
}