Gradle build: cleanup of dependency resolution and consolidation of dependency versions (#13484)

This commit is contained in:
Dawid Weiss 2024-06-17 09:49:21 +02:00 committed by GitHub
parent 8f50976c26
commit dc287862dd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
72 changed files with 2276 additions and 1048 deletions

View File

@ -15,30 +15,50 @@
* limitations under the License.
*/
plugins {
id "java-gradle-plugin"
alias(deps.plugins.spotless) apply false
alias(deps.plugins.forbiddenapis) apply false
}
repositories {
mavenCentral()
}
ext {
// Minimum Java version required to compile buildSrc.
minJavaVersion = JavaVersion.VERSION_21
}
group = "org.apache"
// Make sure the build environment is consistent.
apply from: file('../gradle/validation/check-environment.gradle')
apply from: file('../../gradle/conventions.gradle')
apply from: file('../../gradle/validation/check-environment.gradle')
// Load common buildSrc and script deps.
apply from: file("scriptDepVersions.gradle")
// Add spotless/ tidy.
tasks.register("checkJdkInternalsExportedToGradle") {}
apply from: file('../../gradle/validation/spotless.gradle')
apply from: file('../../gradle/validation/forbidden-apis.gradle')
java {
sourceCompatibility = JavaVersion.toVersion(deps.versions.minJava.get())
targetCompatibility = JavaVersion.toVersion(deps.versions.minJava.get())
}
gradlePlugin {
automatedPublishing = false
plugins {
buildInfra {
id = 'lucene.build-infra'
implementationClass = 'org.apache.lucene.gradle.buildinfra.BuildInfraPlugin'
}
}
}
dependencies {
implementation gradleApi()
implementation localGroovy()
implementation "commons-codec:commons-codec:${scriptDepVersions['commons-codec']}"
implementation deps.commons.codec
}
if (!rootProject.hasJavaFlightRecorder) {
if (!hasJavaFlightRecorder) {
logger.warn('Module jdk.jfr is not available; skipping compilation of Java Flight Recorder support.')
tasks.named('compileJava').configure {
exclude('**/ProfileResults.java')

View File

@ -15,18 +15,12 @@
* limitations under the License.
*/
plugins {
id 'java-library'
}
version = "1.0.0-SNAPSHOT"
group = "org.apache.lucene.tools"
description = 'Doclet-based javadoc validation'
sourceCompatibility = JavaVersion.VERSION_21
targetCompatibility = JavaVersion.VERSION_21
tasks.withType(JavaCompile) {
options.compilerArgs += ["--release", targetCompatibility.toString()]
options.encoding = "UTF-8"
rootProject.name = 'build-infra'
dependencyResolutionManagement {
versionCatalogs {
deps {
from(files('../../versions.toml'))
}
}
}

View File

@ -27,6 +27,11 @@
package org.apache.lucene.gradle;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Locale;
import org.apache.commons.codec.digest.DigestUtils;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
@ -39,16 +44,10 @@ import org.gradle.api.tasks.TaskAction;
import org.gradle.work.Incremental;
import org.gradle.work.InputChanges;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Locale;
public class Checksum extends DefaultTask {
private FileCollection files;
private File outputDir;
private Algorithm algorithm;
private Algorithm algorithm = Algorithm.SHA512;
public enum Algorithm {
MD5(new DigestUtils(DigestUtils.getMd5Digest())),
@ -190,6 +189,8 @@ public class Checksum extends DefaultTask {
private FileCollection filesFor(final Algorithm algo) {
return getProject()
.fileTree(getOutputDir(), files -> files.include("**/*." + algo.toString().toLowerCase(Locale.ROOT)));
.fileTree(
getOutputDir(),
files -> files.include("**/*." + algo.toString().toLowerCase(Locale.ROOT)));
}
}

View File

@ -0,0 +1,288 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.gradle;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter;
import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.testing.TestDescriptor;
import org.gradle.api.tasks.testing.TestListener;
import org.gradle.api.tasks.testing.TestOutputEvent;
import org.gradle.api.tasks.testing.TestOutputListener;
import org.gradle.api.tasks.testing.TestResult;
import org.gradle.api.tasks.testing.logging.TestLogging;
/**
 * An error reporting listener that queues test output streams and displays them on failure.
 *
 * <p>Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed).
 */
public class ErrorReportingTestListener implements TestOutputListener, TestListener {
  private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class);

  /** Replaces characters that are not safe to use in a file name. */
  private static final Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+");

  /** Formats failure stack traces for inclusion in the output log. */
  private final TestExceptionFormatter formatter;

  /** Per-suite output buffers, keyed by suite descriptor identity. */
  private final Map<TestKey, OutputHandler> outputHandlers = new ConcurrentHashMap<>();

  /** Directory for temporary spill files holding large test output. */
  private final Path spillDir;

  /** Directory where the output of failed suites is persisted. */
  private final Path outputsDir;

  /** If true, test output is echoed to stdout as it arrives (see {@link OutputHandler}). */
  private final boolean verboseMode;

  public ErrorReportingTestListener(
      TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
    this.formatter = new FullExceptionFormatter(testLogging);
    this.spillDir = spillDir;
    this.outputsDir = outputsDir;
    this.verboseMode = verboseMode;
  }

  @Override
  public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) {
    handlerFor(testDescriptor).write(outputEvent);
  }

  @Override
  public void beforeSuite(TestDescriptor suite) {
    // noop.
  }

  @Override
  public void beforeTest(TestDescriptor testDescriptor) {
    // Noop.
  }

  @Override
  public void afterSuite(final TestDescriptor suite, TestResult result) {
    // Skip the synthetic wrapper suites Gradle creates around real ones.
    if (suite.getParent() == null || suite.getName().startsWith("Gradle")) {
      return;
    }

    TestKey key = TestKey.of(suite);
    try {
      OutputHandler outputHandler = outputHandlers.get(key);
      if (outputHandler != null) {
        long length = outputHandler.length();
        // Warn about suites producing excessive amounts of output (> 10 MB).
        if (length > 1024 * 1024 * 10) {
          LOGGER.warn(
              String.format(
                  Locale.ROOT,
                  "WARNING: Test %s wrote %,d bytes of output.",
                  suite.getName(),
                  length));
        }
      }

      // If the test suite failed, save its output to disk and, unless verbose mode is
      // already echoing everything as it arrives, copy it to the console.
      boolean failed = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE);
      if (failed) {
        Files.createDirectories(outputsDir);
        Path outputLog = outputsDir.resolve(getOutputLogName(suite));

        // Save the output of a failing test to disk.
        try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) {
          if (outputHandler != null) {
            outputHandler.copyTo(w);
          }
        }

        if (!verboseMode) {
          // Synchronized so that the output of concurrently failing suites is not interleaved.
          synchronized (this) {
            System.out.println();
            System.out.println(
                suite.getClassName()
                    + " > test suite's output saved to "
                    + outputLog
                    + ", copied below:");
            try (BufferedReader reader =
                Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) {
              char[] buf = new char[1024];
              int len;
              while ((len = reader.read(buf)) >= 0) {
                System.out.print(new String(buf, 0, len));
              }
              System.out.println();
            }
          }
        }
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    } finally {
      // Always release the suite's buffer (and its spill file), even if reporting failed.
      OutputHandler handler = outputHandlers.remove(key);
      if (handler != null) {
        try {
          handler.close();
        } catch (IOException e) {
          LOGGER.error("Failed to close output handler for: " + key, e);
        }
      }
    }
  }

  /** Returns a sanitized, file-system-safe log file name for the given suite. */
  public static String getOutputLogName(TestDescriptor suite) {
    return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_");
  }

  @Override
  public void afterTest(TestDescriptor testDescriptor, TestResult result) {
    // Include test failure exception stacktrace(s) in test output log.
    if (result.getResultType() == TestResult.ResultType.FAILURE) {
      if (!result.getExceptions().isEmpty()) {
        String message = formatter.format(testDescriptor, result.getExceptions());
        handlerFor(testDescriptor).write(message);
      }
    }
  }

  private OutputHandler handlerFor(TestDescriptor descriptor) {
    // Attach output of leaves (individual tests) to their parent.
    if (!descriptor.isComposite()) {
      descriptor = descriptor.getParent();
    }
    return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler());
  }

  /** Identity key for a test descriptor: class name, test name and parent descriptor. */
  public static class TestKey {
    private final String key;

    private TestKey(String key) {
      this.key = key;
    }

    public static TestKey of(TestDescriptor d) {
      StringBuilder key = new StringBuilder();
      key.append(d.getClassName());
      key.append("::");
      key.append(d.getName());
      key.append("::");
      key.append(d.getParent() == null ? "-" : d.getParent().toString());
      return new TestKey(key.toString());
    }

    @Override
    public boolean equals(Object o) {
      return o != null && o.getClass() == this.getClass() && Objects.equals(((TestKey) o).key, key);
    }

    @Override
    public int hashCode() {
      return key.hashCode();
    }

    @Override
    public String toString() {
      return key;
    }
  }

  /**
   * Buffers the output of a single suite. Output is prefixed per origin stream, kept in memory
   * and spilled to a temporary file if it grows large; in verbose mode it is additionally teed
   * to stdout as it arrives.
   */
  private class OutputHandler implements Closeable {
    // Max single-line buffer before automatic wrap occurs.
    private static final int MAX_LINE_WIDTH = 1024 * 4;

    private final SpillWriter buffer;

    // internal stream.
    private final PrefixedWriter sint;
    // stdout
    private final PrefixedWriter sout;
    // stderr
    private final PrefixedWriter serr;

    // last used stream (so that we can flush it properly and prefixes are not screwed up).
    private PrefixedWriter last;

    public OutputHandler() {
      buffer =
          new SpillWriter(
              () -> {
                try {
                  return Files.createTempFile(spillDir, "spill-", ".tmp");
                } catch (IOException e) {
                  throw new UncheckedIOException(e);
                }
              });

      Writer sink = buffer;
      if (verboseMode) {
        sink = new StdOutTeeWriter(buffer);
      }

      sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH);
      sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH);
      serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH);
      last = sint;
    }

    /** Routes an output event to the stdout- or stderr-prefixed stream. */
    public void write(TestOutputEvent event) {
      write(
          (event.getDestination() == TestOutputEvent.Destination.StdOut ? sout : serr),
          event.getMessage());
    }

    /** Writes a listener-generated message (e.g. a failure stack trace). */
    public void write(String message) {
      write(sint, message);
    }

    /** Number of characters buffered so far. */
    public long length() throws IOException {
      return buffer.length();
    }

    private void write(PrefixedWriter out, String message) {
      try {
        // Complete the previous stream's line before switching streams, so that the
        // per-stream prefixes do not end up interleaved mid-line.
        if (out != last) {
          last.completeLine();
          last = out;
        }
        out.write(message);
      } catch (IOException e) {
        throw new UncheckedIOException("Unable to write to test output.", e);
      }
    }

    /** Copies all buffered output to {@code out}. */
    public void copyTo(Writer out) throws IOException {
      flush();
      buffer.copyTo(out);
    }

    public void flush() throws IOException {
      sout.completeLine();
      serr.completeLine();
      buffer.flush();
    }

    @Override
    public void close() throws IOException {
      buffer.close();
    }
  }
}

View File

@ -67,6 +67,6 @@ public class GradlePropertiesGenerator {
fileContent = fileContent.replace(entry.getKey(), String.valueOf(entry.getValue()));
}
Files.writeString(
destination, fileContent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW);
destination, fileContent, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW);
}
}

View File

@ -20,12 +20,13 @@ import java.io.IOException;
import java.io.Writer;
/**
* Prefixes every new line with a given string, synchronizing multiple streams to emit consistent lines.
* Prefixes every new line with a given string, synchronizing multiple streams to emit consistent
* lines.
*/
public class PrefixedWriter extends Writer {
Writer sink;
private final static char LF = '\n';
private static final char LF = '\n';
private final String prefix;
private final StringBuilder lineBuffer = new StringBuilder();
private final int maxLineLength;
@ -45,7 +46,7 @@ public class PrefixedWriter extends Writer {
sink.write(LF);
lineBuffer.setLength(0);
if (c != LF) {
if (c != LF) {
lineBuffer.append((char) c);
}
} else {
@ -70,9 +71,7 @@ public class PrefixedWriter extends Writer {
throw new UnsupportedOperationException();
}
/**
* Complete the current line (emit LF if not at the start of the line already).
*/
/** Complete the current line (emit LF if not at the start of the line already). */
public void completeLine() throws IOException {
if (lineBuffer.length() > 0) {
write(LF);

View File

@ -20,13 +20,12 @@ package org.apache.lucene.gradle;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import jdk.jfr.consumer.RecordedClass;
import jdk.jfr.consumer.RecordedEvent;
import jdk.jfr.consumer.RecordedFrame;
@ -36,15 +35,12 @@ import jdk.jfr.consumer.RecordedThread;
import jdk.jfr.consumer.RecordingFile;
/**
* Processes an array of recording files (from tests), and prints a simple histogram.
* Inspired by the JFR example code.
* Whole stacks are deduplicated (with the default stacksize being 1): you can drill deeper
* by adjusting the parameters.
* Processes an array of recording files (from tests), and prints a simple histogram. Inspired by
* the JFR example code. Whole stacks are deduplicated (with the default stacksize being 1): you can
* drill deeper by adjusting the parameters.
*/
public class ProfileResults {
/**
* Formats a frame to a formatted line. This is deduplicated on!
*/
/** Formats a frame to a formatted line. This is deduplicated on! */
static String frameToString(RecordedFrame frame, boolean lineNumbers) {
StringBuilder builder = new StringBuilder();
RecordedMethod method = frame.getMethod();
@ -84,29 +80,32 @@ public class ProfileResults {
/**
* Driver method, for testing standalone.
*
* <pre>
* java -Dtests.profile.count=5 buildSrc/src/main/java/org/apache/lucene/gradle/ProfileResults.java \
* ./lucene/core/build/tmp/tests-cwd/somefile.jfr ...
* </pre>
*/
public static void main(String[] args) throws IOException {
printReport(Arrays.asList(args),
System.getProperty(MODE_KEY, MODE_DEFAULT),
Integer.parseInt(System.getProperty(STACKSIZE_KEY, STACKSIZE_DEFAULT)),
Integer.parseInt(System.getProperty(COUNT_KEY, COUNT_DEFAULT)),
Boolean.parseBoolean(System.getProperty(LINENUMBERS_KEY, LINENUMBERS_DEFAULT)));
printReport(
Arrays.asList(args),
System.getProperty(MODE_KEY, MODE_DEFAULT),
Integer.parseInt(System.getProperty(STACKSIZE_KEY, STACKSIZE_DEFAULT)),
Integer.parseInt(System.getProperty(COUNT_KEY, COUNT_DEFAULT)),
Boolean.parseBoolean(System.getProperty(LINENUMBERS_KEY, LINENUMBERS_DEFAULT)));
}
/** true if we care about this event */
static boolean isInteresting(String mode, RecordedEvent event) {
String name = event.getEventType().getName();
switch(mode) {
switch (mode) {
case "cpu":
return (name.equals("jdk.ExecutionSample") || name.equals("jdk.NativeMethodSample")) &&
!isGradlePollThread(event.getThread("sampledThread"));
return (name.equals("jdk.ExecutionSample") || name.equals("jdk.NativeMethodSample"))
&& !isGradlePollThread(event.getThread("sampledThread"));
case "heap":
return (name.equals("jdk.ObjectAllocationInNewTLAB") || name.equals("jdk.ObjectAllocationOutsideTLAB")) &&
!isGradlePollThread(event.getThread("eventThread"));
return (name.equals("jdk.ObjectAllocationInNewTLAB")
|| name.equals("jdk.ObjectAllocationOutsideTLAB"))
&& !isGradlePollThread(event.getThread("eventThread"));
default:
throw new UnsupportedOperationException(event.toString());
}
@ -119,7 +118,7 @@ public class ProfileResults {
/** value we accumulate for this event */
static long getValue(RecordedEvent event) {
switch(event.getEventType().getName()) {
switch (event.getEventType().getName()) {
case "jdk.ObjectAllocationInNewTLAB":
return event.getLong("tlabSize");
case "jdk.ObjectAllocationOutsideTLAB":
@ -133,10 +132,10 @@ public class ProfileResults {
}
}
/** format a value, if its huge, we show millions */
/** format a value, if it's huge, we show millions */
static String formatValue(long value) {
if (value > 1_000_000) {
return String.format("%dM", value / 1_000_000);
return String.format(Locale.ROOT, "%dM", value / 1_000_000);
} else {
return Long.toString(value);
}
@ -144,15 +143,17 @@ public class ProfileResults {
/** fixed width used for printing the different columns */
private static final int COLUMN_SIZE = 14;
private static final String COLUMN_PAD = "%-" + COLUMN_SIZE + "s";
private static String pad(String input) {
return String.format(Locale.ROOT, COLUMN_PAD, input);
}
/**
* Process all the JFR files passed in args and print a merged summary.
*/
public static void printReport(List<String> files, String mode, int stacksize, int count, boolean lineNumbers) throws IOException {
/** Process all the JFR files passed in args and print a merged summary. */
public static void printReport(
List<String> files, String mode, int stacksize, int count, boolean lineNumbers)
throws IOException {
if (!"cpu".equals(mode) && !"heap".equals(mode)) {
throw new IllegalArgumentException("tests.profile.mode must be one of (cpu,heap)");
}
@ -178,14 +179,13 @@ public class ProfileResults {
StringBuilder stack = new StringBuilder();
for (int i = 0; i < Math.min(stacksize, trace.getFrames().size()); i++) {
if (stack.length() > 0) {
stack.append("\n")
.append(framePadding)
.append(" at ");
stack.append("\n").append(framePadding).append(" at ");
}
stack.append(frameToString(trace.getFrames().get(i), lineNumbers));
}
String line = stack.toString();
SimpleEntry<String,Long> entry = histogram.computeIfAbsent(line, u -> new SimpleEntry<String, Long>(line, 0L));
SimpleEntry<String, Long> entry =
histogram.computeIfAbsent(line, u -> new SimpleEntry<String, Long>(line, 0L));
long value = getValue(event);
entry.setValue(entry.getValue() + value);
totalEvents++;
@ -195,12 +195,20 @@ public class ProfileResults {
}
}
// print summary from histogram
System.out.printf(Locale.ROOT, "PROFILE SUMMARY from %d events (total: %s)\n", totalEvents, formatValue(sumValues));
System.out.printf(
Locale.ROOT,
"PROFILE SUMMARY from %d events (total: %s)\n",
totalEvents,
formatValue(sumValues));
System.out.printf(Locale.ROOT, " tests.profile.mode=%s\n", mode);
System.out.printf(Locale.ROOT, " tests.profile.count=%d\n", count);
System.out.printf(Locale.ROOT, " tests.profile.stacksize=%d\n", stacksize);
System.out.printf(Locale.ROOT, " tests.profile.linenumbers=%b\n", lineNumbers);
System.out.printf(Locale.ROOT, "%s%sSTACK\n", pad("PERCENT"), pad(mode.toUpperCase(Locale.ROOT) + " SAMPLES"));
System.out.printf(
Locale.ROOT,
"%s%sSTACK\n",
pad("PERCENT"),
pad(mode.toUpperCase(Locale.ROOT) + " SAMPLES"));
List<SimpleEntry<String, Long>> entries = new ArrayList<>(histogram.values());
entries.sort((u, v) -> v.getValue().compareTo(u.getValue()));
int seen = 0;
@ -208,8 +216,10 @@ public class ProfileResults {
if (seen++ == count) {
break;
}
String percent = String.format("%2.2f%%", 100 * (c.getValue() / (float) sumValues));
System.out.printf(Locale.ROOT, "%s%s%s\n", pad(percent), pad(formatValue(c.getValue())), c.getKey());
String percent =
String.format(Locale.ROOT, "%2.2f%%", 100 * (c.getValue() / (float) sumValues));
System.out.printf(
Locale.ROOT, "%s%s%s\n", pad(percent), pad(formatValue(c.getValue())), c.getKey());
}
}
}

View File

@ -26,7 +26,7 @@ import java.nio.file.Path;
import java.util.function.Supplier;
public class SpillWriter extends Writer {
private final static int MAX_BUFFERED = 2 * 1024;
private static final int MAX_BUFFERED = 2 * 1024;
private final StringWriter buffer = new StringWriter(MAX_BUFFERED);
private final Supplier<Path> spillPathSupplier;

View File

@ -0,0 +1,93 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.gradle;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Writer;
/**
 * A {@link Writer} that forwards all characters to a delegate writer and additionally echoes
 * ("tees") them to {@link System#out}.
 *
 * <p>{@link #close()} closes only the delegate; the process's standard output is left open.
 */
class StdOutTeeWriter extends Writer {
  private final Writer delegate;
  private final PrintStream out = System.out;

  public StdOutTeeWriter(Writer delegate) {
    this.delegate = delegate;
  }

  @Override
  public void write(int c) throws IOException {
    delegate.write(c);
    // Writer.write(int) carries a full 16-bit char, but PrintStream.write(int) would
    // truncate it to a single byte — print it as a character instead.
    out.print((char) c);
  }

  @Override
  public void write(char[] cbuf) throws IOException {
    delegate.write(cbuf);
    out.print(cbuf);
  }

  @Override
  public void write(String str) throws IOException {
    delegate.write(str);
    out.print(str);
  }

  @Override
  public void write(String str, int off, int len) throws IOException {
    delegate.write(str, off, len);
    out.append(str, off, len);
  }

  @Override
  public Writer append(CharSequence csq) throws IOException {
    delegate.append(csq);
    out.append(csq);
    return this;
  }

  @Override
  public Writer append(CharSequence csq, int start, int end) throws IOException {
    delegate.append(csq, start, end);
    out.append(csq, start, end);
    return this;
  }

  @Override
  public Writer append(char c) throws IOException {
    delegate.append(c);
    out.append(c);
    return this;
  }

  @Override
  public void write(char[] cbuf, int off, int len) throws IOException {
    delegate.write(cbuf, off, len);
    out.print(new String(cbuf, off, len));
  }

  @Override
  public void flush() throws IOException {
    delegate.flush();
    out.flush();
  }

  @Override
  public void close() throws IOException {
    delegate.close();
    // Don't close the actual output.
  }
}

View File

@ -16,12 +16,18 @@
*/
package org.apache.lucene.gradle;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@ -31,12 +37,10 @@ import java.security.NoSuchAlgorithmException;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
/**
* Standalone class that can be used to download a gradle-wrapper.jar
* <p>
* Has no dependencies outside of standard java libraries
*
* <p>Has no dependencies outside of standard java libraries
*/
public class WrapperDownloader {
public static void main(String[] args) {
@ -62,13 +66,15 @@ public class WrapperDownloader {
}
public void run(Path destination) throws IOException, NoSuchAlgorithmException {
Path checksumPath = destination.resolveSibling(destination.getFileName().toString() + ".sha256");
Path checksumPath =
destination.resolveSibling(destination.getFileName().toString() + ".sha256");
if (!Files.exists(checksumPath)) {
throw new IOException("Checksum file not found: " + checksumPath);
}
String expectedChecksum = Files.readString(checksumPath, StandardCharsets.UTF_8).trim();
Path versionPath = destination.resolveSibling(destination.getFileName().toString() + ".version");
Path versionPath =
destination.resolveSibling(destination.getFileName().toString() + ".version");
if (!Files.exists(versionPath)) {
throw new IOException("Wrapper version file not found: " + versionPath);
}
@ -87,7 +93,12 @@ public class WrapperDownloader {
}
}
URL url = URI.create("https://raw.githubusercontent.com/gradle/gradle/v" + wrapperVersion + "/gradle/wrapper/gradle-wrapper.jar").toURL();
URL url =
URI.create(
"https://raw.githubusercontent.com/gradle/gradle/v"
+ wrapperVersion
+ "/gradle/wrapper/gradle-wrapper.jar")
.toURL();
System.err.println("Downloading gradle-wrapper.jar from " + url);
// Zero-copy save the jar to a temp file
@ -103,8 +114,9 @@ public class WrapperDownloader {
} catch (IOException e) {
if (retries-- > 0) {
// Retry after a short delay
System.err.println("Error connecting to server: " + e + ", will retry in " + retryDelay + " seconds.");
Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelay));
System.err.println(
"Error connecting to server: " + e + ", will retry in " + retryDelay + " seconds.");
sleep(TimeUnit.SECONDS.toMillis(retryDelay));
continue;
}
}
@ -115,8 +127,13 @@ public class WrapperDownloader {
case HttpURLConnection.HTTP_BAD_GATEWAY:
if (retries-- > 0) {
// Retry after a short delay.
System.err.println("Server returned HTTP " + connection.getResponseCode() + ", will retry in " + retryDelay + " seconds.");
Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelay));
System.err.println(
"Server returned HTTP "
+ connection.getResponseCode()
+ ", will retry in "
+ retryDelay
+ " seconds.");
sleep(TimeUnit.SECONDS.toMillis(retryDelay));
continue;
}
}
@ -126,13 +143,15 @@ public class WrapperDownloader {
}
try (InputStream is = connection.getInputStream();
OutputStream out = Files.newOutputStream(temp)){
OutputStream out = Files.newOutputStream(temp)) {
is.transferTo(out);
}
String checksum = checksum(digest, temp);
if (!checksum.equalsIgnoreCase(expectedChecksum)) {
throw new IOException(String.format(Locale.ROOT,
throw new IOException(
String.format(
Locale.ROOT,
"Checksum mismatch on downloaded gradle-wrapper.jar (was: %s, expected: %s).",
checksum,
expectedChecksum));
@ -141,8 +160,12 @@ public class WrapperDownloader {
Files.move(temp, destination, REPLACE_EXISTING);
temp = null;
} catch (IOException | InterruptedException e) {
throw new IOException("Could not download gradle-wrapper.jar (" +
e.getClass().getSimpleName() + ": " + e.getMessage() + ").");
throw new IOException(
"Could not download gradle-wrapper.jar ("
+ e.getClass().getSimpleName()
+ ": "
+ e.getMessage()
+ ").");
} finally {
if (temp != null) {
Files.deleteIfExists(temp);
@ -150,6 +173,11 @@ public class WrapperDownloader {
}
}
@SuppressForbidden(reason = "Correct use of thread.sleep.")
private static void sleep(long millis) throws InterruptedException {
Thread.sleep(millis);
}
private String checksum(MessageDigest messageDigest, Path path) throws IOException {
try {
char[] hex = "0123456789abcdef".toCharArray();
@ -160,7 +188,15 @@ public class WrapperDownloader {
}
return sb.toString();
} catch (IOException e) {
throw new IOException("Could not compute digest of file: " + path + " (" + e.getMessage() + ")");
throw new IOException(
"Could not compute digest of file: " + path + " (" + e.getMessage() + ")");
}
}
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
public @interface SuppressForbidden {
/** A reason for suppressing should always be given. */
String reason();
}
}

View File

@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.gradle.buildinfra;
import java.nio.file.Path;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.lucene.gradle.Checksum;
import org.apache.lucene.gradle.ErrorReportingTestListener;
import org.apache.lucene.gradle.datasets.ExtractReuters;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.tasks.testing.TestDescriptor;
import org.gradle.api.tasks.testing.logging.TestLogging;
/**
 * Gradle plugin exposing build-infrastructure utilities from buildSrc to build scripts via a
 * {@code buildinfra} project extension.
 */
public class BuildInfraPlugin implements Plugin<Project> {
  @Override
  public void apply(Project project) {
    project.getExtensions().create(BuildInfraExtension.NAME, BuildInfraExtension.class);
  }

  /** The {@code buildinfra} extension: small helpers used by build scripts. */
  public static class BuildInfraExtension {
    /** Name under which this extension is registered on a project. */
    public static final String NAME = "buildinfra";

    /** Returns the {@link Checksum} task class so build scripts can register checksum tasks. */
    public Class<?> checksumClass() {
      return Checksum.class;
    }

    /** Creates a listener that buffers test output and reports it for failed suites. */
    public ErrorReportingTestListener newErrorReportingTestListener(
        TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
      return new ErrorReportingTestListener(testLogging, spillDir, outputsDir, verboseMode);
    }

    /** Returns the name of the log file used for a suite's saved output. */
    public String getOutputLogName(TestDescriptor suite) {
      return ErrorReportingTestListener.getOutputLogName(suite);
    }

    /** Returns a commons-codec {@link DigestUtils} configured for SHA-1. */
    public DigestUtils sha1Digest() {
      return new DigestUtils(DigestUtils.getSha1Digest());
    }

    /** Extracts the Reuters benchmark dataset from {@code reutersDir} into {@code outputDir}. */
    public void extractReuters(String reutersDir, String outputDir) throws Exception {
      ExtractReuters.main(new String[] {reutersDir, outputDir});
    }
  }
}

View File

@ -30,8 +30,7 @@ import java.util.regex.Pattern;
import java.util.stream.Stream;
/**
* Split the Reuters SGML documents into Simple Text files containing:
* Title, Date, Dateline, Body
* Split the Reuters SGML documents into Simple Text files containing: Title, Date, Dateline, Body
*/
public class ExtractReuters {
private final Path reutersDir;
@ -67,7 +66,9 @@ public class ExtractReuters {
private static final String[] META_CHARS = {"&", "<", ">", "\"", "'"};
private static final String[] META_CHARS_SERIALIZATIONS = {"&amp;", "&lt;", "&gt;", "&quot;", "&apos;"};
private static final String[] META_CHARS_SERIALIZATIONS = {
"&amp;", "&lt;", "&gt;", "&quot;", "&apos;"
};
/** Override if you wish to change what is extracted */
protected void extractFile(Path sgmFile) throws IOException {

View File

@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Plugin aliases come from the 'deps' version catalog (versions.toml, wired up in
// settings); they are declared 'apply false' here and activated by the validation
// scripts included below.
plugins {
  id 'java-library'
  alias(deps.plugins.spotless) apply false
  alias(deps.plugins.forbiddenapis) apply false
}

repositories {
  mavenCentral()
}

version = "1.0.0-SNAPSHOT"
group = "org.apache.lucene.tools"
description = 'Doclet-based javadoc validation'

// Make sure the build environment is consistent.
apply from: file('../../gradle/conventions.gradle')
apply from: file('../../gradle/validation/check-environment.gradle')

// Add spotless/ tidy.
tasks.register("checkJdkInternalsExportedToGradle") {}
apply from: file('../../gradle/validation/spotless.gradle')
apply from: file('../../gradle/validation/forbidden-apis.gradle')

// Source/target compatibility follow the minimum Java version declared in the catalog.
java {
  sourceCompatibility = JavaVersion.toVersion(deps.versions.minJava.get())
  targetCompatibility = JavaVersion.toVersion(deps.versions.minJava.get())
}

tasks.withType(JavaCompile).configureEach {
  options.compilerArgs += ["--release", java.targetCompatibility.toString()]
  options.encoding = "UTF-8"
}

View File

@ -15,3 +15,10 @@
* limitations under the License.
*/
// Share the root project's version catalog with this included build so that all
// dependency and plugin versions stay declared in a single versions.toml file.
dependencyResolutionManagement {
  versionCatalogs {
    deps {
      from(files('../../versions.toml'))
    }
  }
}

View File

@ -16,6 +16,9 @@
*/
package org.apache.lucene.missingdoclet;
import com.sun.source.doctree.DocCommentTree;
import com.sun.source.doctree.ParamTree;
import com.sun.source.util.DocTrees;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
@ -24,7 +27,6 @@ import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
@ -36,24 +38,19 @@ import javax.lang.model.util.ElementFilter;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Elements.Origin;
import javax.tools.Diagnostic;
import com.sun.source.doctree.DocCommentTree;
import com.sun.source.doctree.ParamTree;
import com.sun.source.util.DocTrees;
import jdk.javadoc.doclet.Doclet;
import jdk.javadoc.doclet.DocletEnvironment;
import jdk.javadoc.doclet.Reporter;
import jdk.javadoc.doclet.StandardDoclet;
/**
* Checks for missing javadocs, where missing also means "only whitespace" or "license header".
* Has option --missing-level (package, class, method, parameter) so that we can improve over time.
* Has option --missing-ignore to ignore individual elements (such as split packages).
* It isn't recursive, just ignores exactly the elements you tell it.
* This should be removed when packaging is fixed to no longer be split across JARs.
* Has option --missing-method to apply "method" level to selected packages (fix one at a time).
* Matches package names exactly: so you'll need to list subpackages separately.
* Checks for missing javadocs, where missing also means "only whitespace" or "license header". Has
* option --missing-level (package, class, method, parameter) so that we can improve over time. Has
* option --missing-ignore to ignore individual elements (such as split packages). It isn't
* recursive, just ignores exactly the elements you tell it. This should be removed when packaging
* is fixed to no longer be split across JARs. Has option --missing-method to apply "method" level
* to selected packages (fix one at a time). Matches package names exactly: so you'll need to list
* subpackages separately.
*/
public class MissingDoclet extends StandardDoclet {
// checks that modules and packages have documentation
@ -71,120 +68,123 @@ public class MissingDoclet extends StandardDoclet {
Elements elementUtils;
Set<String> ignored = Collections.emptySet();
Set<String> methodPackages = Collections.emptySet();
@Override
public Set<Doclet.Option> getSupportedOptions() {
Set<Doclet.Option> options = new HashSet<>(super.getSupportedOptions());
options.add(new Doclet.Option() {
@Override
public int getArgumentCount() {
return 1;
}
options.add(
new Doclet.Option() {
@Override
public int getArgumentCount() {
return 1;
}
@Override
public String getDescription() {
return "level to enforce for missing javadocs: [package, class, method, parameter]";
}
@Override
public String getDescription() {
return "level to enforce for missing javadocs: [package, class, method, parameter]";
}
@Override
public Kind getKind() {
return Option.Kind.STANDARD;
}
@Override
public Kind getKind() {
return Option.Kind.STANDARD;
}
@Override
public List<String> getNames() {
return Collections.singletonList("--missing-level");
}
@Override
public List<String> getNames() {
return Collections.singletonList("--missing-level");
}
@Override
public String getParameters() {
return "level";
}
@Override
public String getParameters() {
return "level";
}
@Override
public boolean process(String option, List<String> arguments) {
switch (arguments.get(0)) {
case "package":
level = PACKAGE;
@Override
public boolean process(String option, List<String> arguments) {
switch (arguments.get(0)) {
case "package":
level = PACKAGE;
return true;
case "class":
level = CLASS;
return true;
case "method":
level = METHOD;
return true;
case "parameter":
level = PARAMETER;
return true;
default:
return false;
}
}
});
options.add(
new Doclet.Option() {
@Override
public int getArgumentCount() {
return 1;
}
@Override
public String getDescription() {
return "comma separated list of element names to ignore (e.g. as a workaround for split packages)";
}
@Override
public Kind getKind() {
return Option.Kind.STANDARD;
}
@Override
public List<String> getNames() {
return Collections.singletonList("--missing-ignore");
}
@Override
public String getParameters() {
return "ignoredNames";
}
@Override
public boolean process(String option, List<String> arguments) {
ignored = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
return true;
case "class":
level = CLASS;
}
});
options.add(
new Doclet.Option() {
@Override
public int getArgumentCount() {
return 1;
}
@Override
public String getDescription() {
return "comma separated list of packages to check at 'method' level";
}
@Override
public Kind getKind() {
return Option.Kind.STANDARD;
}
@Override
public List<String> getNames() {
return Collections.singletonList("--missing-method");
}
@Override
public String getParameters() {
return "packages";
}
@Override
public boolean process(String option, List<String> arguments) {
methodPackages = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
return true;
case "method":
level = METHOD;
return true;
case "parameter":
level = PARAMETER;
return true;
default:
return false;
}
}
});
options.add(new Doclet.Option() {
@Override
public int getArgumentCount() {
return 1;
}
@Override
public String getDescription() {
return "comma separated list of element names to ignore (e.g. as a workaround for split packages)";
}
@Override
public Kind getKind() {
return Option.Kind.STANDARD;
}
@Override
public List<String> getNames() {
return Collections.singletonList("--missing-ignore");
}
@Override
public String getParameters() {
return "ignoredNames";
}
@Override
public boolean process(String option, List<String> arguments) {
ignored = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
return true;
}
});
options.add(new Doclet.Option() {
@Override
public int getArgumentCount() {
return 1;
}
@Override
public String getDescription() {
return "comma separated list of packages to check at 'method' level";
}
@Override
public Kind getKind() {
return Option.Kind.STANDARD;
}
@Override
public List<String> getNames() {
return Collections.singletonList("--missing-method");
}
@Override
public String getParameters() {
return "packages";
}
@Override
public boolean process(String option, List<String> arguments) {
methodPackages = new HashSet<>(Arrays.asList(arguments.get(0).split(",")));
return true;
}
});
}
});
return options;
}
@ -205,10 +205,8 @@ public class MissingDoclet extends StandardDoclet {
return super.run(docEnv);
}
/**
* Returns effective check level for this element
*/
/** Returns effective check level for this element */
private int level(Element element) {
String pkg = elementUtils.getPackageOf(element).getQualifiedName().toString();
if (methodPackages.contains(pkg)) {
@ -217,24 +215,24 @@ public class MissingDoclet extends StandardDoclet {
return level;
}
}
/**
* Check an individual element.
* This checks packages and types from the doctrees.
* It will recursively check methods/fields from encountered types when the level is "method"
/**
* Check an individual element. This checks packages and types from the doctrees. It will
* recursively check methods/fields from encountered types when the level is "method"
*/
private void check(Element element) {
switch(element.getKind()) {
switch (element.getKind()) {
case MODULE:
// don't check the unnamed module, it won't have javadocs
if (!((ModuleElement)element).isUnnamed()) {
if (!((ModuleElement) element).isUnnamed()) {
checkComment(element);
}
break;
case PACKAGE:
checkComment(element);
break;
// class-like elements, check them, then recursively check their children (fields and methods)
// class-like elements, check them, then recursively check their children (fields and
// methods)
case CLASS:
case INTERFACE:
case ENUM:
@ -242,21 +240,24 @@ public class MissingDoclet extends StandardDoclet {
case ANNOTATION_TYPE:
if (level(element) >= CLASS) {
checkComment(element);
if (element instanceof TypeElement te && element.getKind() == ElementKind.RECORD && level(element) >= METHOD) {
if (element instanceof TypeElement te
&& element.getKind() == ElementKind.RECORD
&& level(element) >= METHOD) {
checkRecordParameters(te, docTrees.getDocCommentTree(element));
}
for (var subElement : element.getEnclosedElements()) {
// don't recurse into enclosed types, otherwise we'll double-check since they are already in the included docTree
if (subElement.getKind() == ElementKind.METHOD ||
subElement.getKind() == ElementKind.CONSTRUCTOR ||
subElement.getKind() == ElementKind.FIELD ||
subElement.getKind() == ElementKind.ENUM_CONSTANT) {
// don't recurse into enclosed types, otherwise we'll double-check since they are
// already in the included docTree
if (subElement.getKind() == ElementKind.METHOD
|| subElement.getKind() == ElementKind.CONSTRUCTOR
|| subElement.getKind() == ElementKind.FIELD
|| subElement.getKind() == ElementKind.ENUM_CONSTANT) {
check(subElement);
}
}
}
break;
// method-like elements, check them if we are configured to do so
// method-like elements, check them if we are configured to do so
case METHOD:
case CONSTRUCTOR:
case FIELD:
@ -272,8 +273,8 @@ public class MissingDoclet extends StandardDoclet {
/**
* Return true if the method is synthetic enum (values/valueOf) or record accessor method.
* According to the doctree documentation, the "included" set never includes synthetic/mandated elements.
* UweSays: It should not happen but it happens!
* According to the doctree documentation, the "included" set never includes synthetic/mandated
* elements. UweSays: It should not happen but it happens!
*/
private boolean isSyntheticMethod(Element element) {
// exclude all not explicitely declared methods
@ -293,20 +294,23 @@ public class MissingDoclet extends StandardDoclet {
}
return false;
}
/**
* Checks that an element doesn't have missing javadocs.
* In addition to truly "missing", check that comments aren't solely whitespace (generated by some IDEs),
* that they aren't a license header masquerading as a javadoc comment.
* Checks that an element doesn't have missing javadocs. In addition to truly "missing", check
* that comments aren't solely whitespace (generated by some IDEs), that they aren't a license
* header masquerading as a javadoc comment.
*/
private void checkComment(Element element) {
// sanity check that the element is really "included", because we do some recursion into types
if (!docEnv.isIncluded(element)) {
return;
}
// check that this element isn't on our ignore list. This is only used as a workaround for "split packages".
// ignoring a package isn't recursive (on purpose), we still check all the classes, etc. inside it.
// we just need to cope with the fact package-info.java isn't there because it is split across multiple jars.
// check that this element isn't on our ignore list. This is only used as a workaround for
// "split packages".
// ignoring a package isn't recursive (on purpose), we still check all the classes, etc. inside
// it.
// we just need to cope with the fact package-info.java isn't there because it is split across
// multiple jars.
if (ignored.contains(element.toString())) {
return;
}
@ -319,14 +323,17 @@ public class MissingDoclet extends StandardDoclet {
error(element, "javadocs are missing");
}
} else {
var normalized = tree.getFirstSentence().get(0).toString()
.replace('\u00A0', ' ')
.trim()
.toLowerCase(Locale.ROOT);
var normalized =
tree.getFirstSentence()
.get(0)
.toString()
.replace('\u00A0', ' ')
.trim()
.toLowerCase(Locale.ROOT);
if (normalized.isEmpty()) {
error(element, "blank javadoc comment");
} else if (normalized.startsWith("licensed to the apache software foundation") ||
normalized.startsWith("copyright 2004 the apache software foundation")) {
} else if (normalized.startsWith("licensed to the apache software foundation")
|| normalized.startsWith("copyright 2004 the apache software foundation")) {
error(element, "comment is really a license");
}
}
@ -336,13 +343,15 @@ public class MissingDoclet extends StandardDoclet {
}
private boolean hasInheritedJavadocs(Element element) {
boolean hasOverrides = element.getAnnotationMirrors().stream()
.anyMatch(ann -> ann.getAnnotationType().toString().equals(Override.class.getName()));
boolean hasOverrides =
element.getAnnotationMirrors().stream()
.anyMatch(ann -> ann.getAnnotationType().toString().equals(Override.class.getName()));
if (hasOverrides) {
// If an element has explicit @Overrides annotation, assume it does
// have inherited javadocs somewhere.
// reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but @Override declared, skipping.");
// reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but @Override declared,
// skipping.");
return true;
}
@ -359,7 +368,8 @@ public class MissingDoclet extends StandardDoclet {
// We could check supMethod for non-empty javadoc here. Don't know if this makes
// sense though as all methods will be verified in the end so it'd fail on the
// top of the hierarchy (if empty) anyway.
// reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but method overrides another, skipping.");
// reporter.print(Diagnostic.Kind.NOTE, element, "javadoc empty but method overrides
// another, skipping.");
return true;
}
}
@ -369,15 +379,14 @@ public class MissingDoclet extends StandardDoclet {
return false;
}
/* Find types from which methods in type may inherit javadoc, in the proper order.*/
private Stream<Element> superTypeForInheritDoc(Element type) {
TypeElement clazz = (TypeElement) type;
List<Element> interfaces = clazz.getInterfaces()
.stream()
.filter(tm -> tm.getKind() == TypeKind.DECLARED)
.map(tm -> ((DeclaredType) tm).asElement())
.collect(Collectors.toList());
List<Element> interfaces =
clazz.getInterfaces().stream()
.filter(tm -> tm.getKind() == TypeKind.DECLARED)
.map(tm -> ((DeclaredType) tm).asElement())
.collect(Collectors.toList());
Stream<Element> result = interfaces.stream();
result = Stream.concat(result, interfaces.stream().flatMap(this::superTypeForInheritDoc));
@ -394,12 +403,12 @@ public class MissingDoclet extends StandardDoclet {
/** Returns all {@code @param} parameters we see in the javadocs of the element */
private Set<String> getDocParameters(DocCommentTree tree) {
return Stream.ofNullable(tree)
.flatMap(t -> t.getBlockTags().stream())
.filter(ParamTree.class::isInstance)
.map(tag -> ((ParamTree)tag).getName().getName().toString())
.collect(Collectors.toSet());
.flatMap(t -> t.getBlockTags().stream())
.filter(ParamTree.class::isInstance)
.map(tag -> ((ParamTree) tag).getName().getName().toString())
.collect(Collectors.toSet());
}
/** Checks there is a corresponding "param" tag for each method parameter */
private void checkMethodParameters(ExecutableElement element, DocCommentTree tree) {
// record each @param that we see
@ -412,7 +421,7 @@ public class MissingDoclet extends StandardDoclet {
}
}
}
/** Checks there is a corresponding "param" tag for each record component */
private void checkRecordParameters(TypeElement element, DocCommentTree tree) {
// record each @param that we see
@ -425,7 +434,7 @@ public class MissingDoclet extends StandardDoclet {
}
}
}
/** logs a new error for the particular element */
private void error(Element element, String message) {
var fullMessage = new StringBuilder();

View File

@ -20,13 +20,18 @@ import java.time.format.DateTimeFormatter
plugins {
id "base"
id "com.palantir.consistent-versions" version "2.11.0"
id "org.owasp.dependencycheck" version "7.2.0"
id 'de.thetaphi.forbiddenapis' version '3.7' apply false
id "de.undercouch.download" version "5.2.0" apply false
id "net.ltgt.errorprone" version "3.1.0" apply false
id 'com.diffplug.spotless' version "6.5.2" apply false
id 'org.barfuin.gradle.jacocolog' version "3.1.0" apply false
id "lucene.build-infra"
alias(deps.plugins.dependencychecks)
alias(deps.plugins.spotless) apply false
alias(deps.plugins.benmanes.versions)
alias(deps.plugins.forbiddenapis) apply false
alias(deps.plugins.versionCatalogUpdate) apply false
alias(deps.plugins.randomizedtesting) apply false
alias(deps.plugins.owasp.dependencycheck)
alias(deps.plugins.undercouch.download) apply false
alias(deps.plugins.errorprone) apply false
alias(deps.plugins.jacocolog) apply false
}
apply from: file('gradle/globals.gradle')
@ -73,7 +78,7 @@ ext {
}
// Minimum Java version required to compile and run Lucene.
minJavaVersion = JavaVersion.VERSION_21
minJavaVersion = JavaVersion.toVersion(deps.versions.minJava.get())
// snapshot build marker used in scripts.
snapshotBuild = version.contains("SNAPSHOT")
@ -98,17 +103,15 @@ configurations {
dependencies {
// Use a newer groovy that doesn't have illegal reflective accesses.
groovy "org.codehaus.groovy:groovy-all:3.0.21"
groovy deps.groovy
}
apply from: file('buildSrc/scriptDepVersions.gradle')
// Include smaller chunks configuring dedicated build areas.
// Some of these intersect or add additional functionality.
// The order of inclusion of these files shouldn't matter (but may
// if the build file is incorrectly written and evaluates something
// eagerly).
apply from: file('gradle/conventions.gradle')
apply from: file('gradle/generation/local-settings.gradle')
// Make sure the build environment is consistent.
@ -140,15 +143,25 @@ apply from: file('gradle/validation/precommit.gradle')
apply from: file('gradle/validation/forbidden-apis.gradle')
apply from: file('gradle/validation/jar-checks.gradle')
apply from: file('gradle/validation/git-status.gradle')
apply from: file('gradle/validation/versions-props-sorted.gradle')
apply from: file('gradle/validation/validate-source-patterns.gradle')
apply from: file('gradle/validation/rat-sources.gradle')
apply from: file('gradle/validation/owasp-dependency-check.gradle')
apply from: file('gradle/validation/ecj-lint.gradle')
apply from: file('gradle/validation/gradlew-scripts-tweaked.gradle')
apply from: file('gradle/validation/dependencies.gradle')
apply from: file('gradle/validation/spotless.gradle')
// Wire up included builds to some validation tasks.
rootProject.tasks.named("tidy").configure {
dependsOn gradle.includedBuilds*.task(":tidy")
}
rootProject.tasks.named("clean").configure {
dependsOn gradle.includedBuilds*.task(":clean")
}
rootProject.tasks.named("check").configure {
dependsOn gradle.includedBuilds*.task(":forbiddenApis")
}
// Source or data regeneration tasks
apply from: file('gradle/generation/regenerate.gradle')
apply from: file('gradle/generation/jflex.gradle')

View File

@ -1,279 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.gradle;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter;
import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.testing.TestDescriptor;
import org.gradle.api.tasks.testing.TestListener;
import org.gradle.api.tasks.testing.TestOutputEvent;
import org.gradle.api.tasks.testing.TestOutputListener;
import org.gradle.api.tasks.testing.TestResult;
import org.gradle.api.tasks.testing.logging.TestLogging;
/**
 * An error reporting listener that queues test output streams and displays them
 * on failure.
 *
 * <p>Output of each test suite is buffered (spilling to disk past a threshold)
 * and only replayed/saved when the suite fails, keeping successful builds quiet.
 *
 * <p>Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed).
 */
public class ErrorReportingTestListener implements TestOutputListener, TestListener {
  private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class);

  // Formats failure exceptions (stack traces) for inclusion in the output log.
  private final TestExceptionFormatter formatter;
  // One buffered output handler per suite; concurrent map because Gradle may
  // run suites (and deliver their output events) from multiple threads.
  private final Map<TestKey, OutputHandler> outputHandlers = new ConcurrentHashMap<>();
  // Directory for temporary spill files holding oversized buffered output.
  private final Path spillDir;
  // Directory where a failing suite's full output log is saved.
  private final Path outputsDir;
  // When true, output is echoed to stdout live instead of replayed on failure.
  private final boolean verboseMode;

  public ErrorReportingTestListener(TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
    this.formatter = new FullExceptionFormatter(testLogging);
    this.spillDir = spillDir;
    this.outputsDir = outputsDir;
    this.verboseMode = verboseMode;
  }

  @Override
  public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) {
    // Route every stdout/stderr event into the suite's buffering handler.
    handlerFor(testDescriptor).write(outputEvent);
  }

  @Override
  public void beforeSuite(TestDescriptor suite) {
    // noop.
  }

  @Override
  public void beforeTest(TestDescriptor testDescriptor) {
    // Noop.
  }

  @Override
  public void afterSuite(final TestDescriptor suite, TestResult result) {
    // Ignore the synthetic root descriptor and Gradle's own wrapper suites;
    // only real test-class suites have buffered output worth reporting.
    if (suite.getParent() == null || suite.getName().startsWith("Gradle")) {
      return;
    }

    TestKey key = TestKey.of(suite);
    try {
      OutputHandler outputHandler = outputHandlers.get(key);
      if (outputHandler != null) {
        long length = outputHandler.length();
        // Warn about exceptionally chatty suites (> 10 MB of output).
        if (length > 1024 * 1024 * 10) {
          LOGGER.warn(String.format(Locale.ROOT, "WARNING: Test %s wrote %,d bytes of output.",
              suite.getName(),
              length));
        }
      }

      boolean echoOutput = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE);
      // NOTE(review): dumpOutput currently mirrors echoOutput; presumably kept
      // as a separate flag so the two behaviors can diverge later — confirm.
      boolean dumpOutput = echoOutput;

      // If the test suite failed, report output.
      if (dumpOutput || echoOutput) {
        Files.createDirectories(outputsDir);
        Path outputLog = outputsDir.resolve(getOutputLogName(suite));

        // Save the output of a failing test to disk.
        try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) {
          if (outputHandler != null) {
            outputHandler.copyTo(w);
          }
        }

        // In verbose mode the output was already streamed live, so only replay
        // the saved log to the console when not verbose.
        if (echoOutput && !verboseMode) {
          // Synchronized so replays from concurrently-finishing suites don't interleave.
          synchronized (this) {
            System.out.println();
            System.out.println(suite.getClassName() + " > test suite's output saved to " + outputLog + ", copied below:");
            try (BufferedReader reader = Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) {
              char[] buf = new char[1024];
              int len;
              while ((len = reader.read(buf)) >= 0) {
                System.out.print(new String(buf, 0, len));
              }
              System.out.println();
            }
          }
        }
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    } finally {
      // Always drop and close the handler so spill files are released even if
      // writing the log above failed.
      OutputHandler handler = outputHandlers.remove(key);
      if (handler != null) {
        try {
          handler.close();
        } catch (IOException e) {
          LOGGER.error("Failed to close output handler for: " + key, e);
        }
      }
    }
  }

  // Replaces any character unsafe in a file name with '_'.
  private static Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+");

  /** Returns the sanitized file name used to store a suite's output log. */
  public static String getOutputLogName(TestDescriptor suite) {
    return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_");
  }

  @Override
  public void afterTest(TestDescriptor testDescriptor, TestResult result) {
    // Include test failure exception stacktrace(s) in test output log.
    if (result.getResultType() == TestResult.ResultType.FAILURE) {
      if (result.getExceptions().size() > 0) {
        String message = formatter.format(testDescriptor, result.getExceptions());
        handlerFor(testDescriptor).write(message);
      }
    }
  }

  private OutputHandler handlerFor(TestDescriptor descriptor) {
    // Attach output of leaves (individual tests) to their parent.
    if (!descriptor.isComposite()) {
      descriptor = descriptor.getParent();
    }
    return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler());
  }

  /** Identity key for a test descriptor (class + name + parent). */
  public static class TestKey {
    private final String key;

    private TestKey(String key) {
      this.key = key;
    }

    public static TestKey of(TestDescriptor d) {
      StringBuilder key = new StringBuilder();
      key.append(d.getClassName());
      key.append("::");
      key.append(d.getName());
      key.append("::");
      key.append(d.getParent() == null ? "-" : d.getParent().toString());
      return new TestKey(key.toString());
    }

    @Override
    public boolean equals(Object o) {
      return o != null &&
          o.getClass() == this.getClass() &&
          Objects.equals(((TestKey) o).key, key);
    }

    @Override
    public int hashCode() {
      return key.hashCode();
    }

    @Override
    public String toString() {
      return key;
    }
  }

  /**
   * Buffers one suite's output, prefixing each line by its origin stream
   * (internal, stdout, stderr) and spilling to a temp file when large.
   */
  private class OutputHandler implements Closeable {
    // Max single-line buffer before automatic wrap occurs.
    private static final int MAX_LINE_WIDTH = 1024 * 4;

    private final SpillWriter buffer;

    // internal stream.
    private final PrefixedWriter sint;
    // stdout
    private final PrefixedWriter sout;
    // stderr
    private final PrefixedWriter serr;

    // last used stream (so that we can flush it properly and prefixes are not screwed up).
    private PrefixedWriter last;

    public OutputHandler() {
      buffer = new SpillWriter(() -> {
        try {
          return Files.createTempFile(spillDir, "spill-", ".tmp");
        } catch (IOException e) {
          throw new UncheckedIOException(e);
        }
      });

      // In verbose mode, tee everything to stdout as it is buffered.
      Writer sink = buffer;
      if (verboseMode) {
        sink = new StdOutTeeWriter(buffer);
      }

      sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH);
      sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH);
      serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH);
      last = sint;
    }

    public void write(TestOutputEvent event) {
      write((event.getDestination() == TestOutputEvent.Destination.StdOut ? sout : serr), event.getMessage());
    }

    public void write(String message) {
      write(sint, message);
    }

    public long length() throws IOException {
      return buffer.length();
    }

    private void write(PrefixedWriter out, String message) {
      try {
        // Finish the previous stream's line before switching prefixes.
        if (out != last) {
          last.completeLine();
          last = out;
        }
        out.write(message);
      } catch (IOException e) {
        throw new UncheckedIOException("Unable to write to test output.", e);
      }
    }

    public void copyTo(Writer out) throws IOException {
      flush();
      buffer.copyTo(out);
    }

    public void flush() throws IOException {
      sout.completeLine();
      serr.completeLine();
      buffer.flush();
    }

    @Override
    public void close() throws IOException {
      buffer.close();
    }
  }
}

View File

@ -1,93 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.gradle;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Writer;
/**
 * A {@link Writer} that forwards all characters to a delegate writer while also
 * echoing ("tee") them to {@code System.out}.
 *
 * <p>The {@code System.out} reference is captured at construction time, so a later
 * {@code System.setOut(...)} does not affect this writer. Closing this writer
 * closes the delegate but intentionally leaves the process output stream open.
 */
class StdOutTeeWriter extends Writer {
  private final Writer delegate;
  // Captured once; deliberately not re-read from System.out on each write.
  private final PrintStream out = System.out;

  public StdOutTeeWriter(Writer delegate) {
    this.delegate = delegate;
  }

  @Override
  public void write(int c) throws IOException {
    delegate.write(c);
    // Writer.write(int) carries a 16-bit char, but PrintStream.write(int) would
    // emit only the low-order byte, corrupting characters above 0xFF. Print it
    // as a char so it goes through the stream's charset encoder instead.
    out.print((char) c);
  }

  @Override
  public void write(char[] cbuf) throws IOException {
    delegate.write(cbuf);
    out.print(cbuf);
  }

  @Override
  public void write(String str) throws IOException {
    delegate.write(str);
    out.print(str);
  }

  @Override
  public void write(String str, int off, int len) throws IOException {
    delegate.write(str, off, len);
    // Writer.write takes (offset, length) but Appendable.append takes
    // (start, end): translate so the echoed substring matches the delegate's.
    out.append(str, off, off + len);
  }

  @Override
  public Writer append(CharSequence csq) throws IOException {
    delegate.append(csq);
    out.append(csq);
    return this;
  }

  @Override
  public Writer append(CharSequence csq, int start, int end) throws IOException {
    delegate.append(csq, start, end);
    out.append(csq, start, end);
    return this;
  }

  @Override
  public Writer append(char c) throws IOException {
    delegate.append(c);
    out.append(c);
    return this;
  }

  @Override
  public void write(char[] cbuf, int off, int len) throws IOException {
    delegate.write(cbuf, off, len);
    out.print(new String(cbuf, off, len));
  }

  @Override
  public void flush() throws IOException {
    delegate.flush();
    out.flush();
  }

  @Override
  public void close() throws IOException {
    delegate.close();
    // Don't close the actual output.
  }
}

View File

@ -15,19 +15,19 @@
* limitations under the License.
*/
// Declare script dependency versions outside of palantir's
// version unification control. These are not our main dependencies
// but are reused in buildSrc and across applied scripts.
ext {
scriptDepVersions = [
"apache-rat": "0.14",
"asm": "9.7",
"commons-codec": "1.13",
"ecj": "3.36.0",
"flexmark": "0.61.24",
"javacc": "7.0.12",
"jflex": "1.8.2",
"jgit": "5.13.1.202206130422-r",
]
configure(allprojects) {
tasks.register("tidy").configure {
description "Applies formatters and cleanups to sources."
group "verification"
}
}
// Locate script-relative resource folder. This is context-sensitive so pass
// the right buildscript (top-level).
configure(rootProject) {
ext {
scriptResources = { buildscript ->
return file(buildscript.sourceFile.absolutePath.replaceAll('.gradle$', ""))
}
}
}

View File

@ -1,5 +1,3 @@
import org.apache.lucene.gradle.datasets.ExtractReuters
import java.nio.file.Files
/*
@ -25,7 +23,7 @@ buildscript {
}
dependencies {
classpath "com.github.luben:zstd-jni:1.5.5-11"
classpath deps.zstd
}
}
@ -40,7 +38,7 @@ def unzstd(java.nio.file.Path src, java.nio.file.Path dst) {
// TODO: not sure whether this should live in benchmarks, but for now let it be.
configure(project(":lucene:benchmark")) {
apply plugin: "java"
apply plugin: "de.undercouch.download"
apply plugin: deps.plugins.undercouch.download.get().pluginId
ext {
dataDir = file("work")
@ -164,7 +162,7 @@ configure(project(":lucene:benchmark")) {
logger.lifecycle("Extracting ${ext.name} into ${ext.dst}...")
ext.dst.deleteDir()
ExtractReuters.main(untarPath.toString(), ext.dst.toString())
buildinfra.extractReuters(untarPath.toString(), ext.dst.toString())
}
}

View File

@ -34,11 +34,11 @@ buildscript {
}
dependencies {
classpath "com.vladsch.flexmark:flexmark:${scriptDepVersions['flexmark']}"
classpath "com.vladsch.flexmark:flexmark-ext-abbreviation:${scriptDepVersions['flexmark']}"
classpath "com.vladsch.flexmark:flexmark-ext-attributes:${scriptDepVersions['flexmark']}"
classpath "com.vladsch.flexmark:flexmark-ext-autolink:${scriptDepVersions['flexmark']}"
classpath "com.vladsch.flexmark:flexmark-ext-tables:${scriptDepVersions['flexmark']}"
classpath deps.flexmark.core
classpath deps.flexmark.ext.abbreviation
classpath deps.flexmark.ext.attributes
classpath deps.flexmark.ext.autolink
classpath deps.flexmark.ext.tables
}
}

View File

@ -23,7 +23,7 @@ configure(project(":lucene:expressions")) {
}
dependencies {
antlr "org.antlr:antlr4"
antlr deps.antlr.core
}
task generateAntlrInternal() {

View File

@ -35,42 +35,44 @@ configure(project(":lucene:core")) {
}
dependencies {
apiextractor "org.ow2.asm:asm:${scriptDepVersions['asm']}"
apiextractor deps.asm.core
}
mrjarJavaVersions.each { jdkVersion ->
def task = tasks.create(name: "generateJdkApiJar${jdkVersion}", type: JavaExec) {
description "Regenerate the API-only JAR file with public Panama Foreign & Vector API from JDK ${jdkVersion}"
group "generation"
javaLauncher = javaToolchains.launcherFor {
languageVersion = JavaLanguageVersion.of(jdkVersion)
}
onlyIf {
try {
javaLauncher.get()
return true
} catch (Exception e) {
logger.warn('Launcher for Java {} is not available; skipping regeneration of Panama Foreign & Vector API JAR.', jdkVersion)
logger.warn('Error: {}', e.cause?.message)
logger.warn("Please make sure to point env 'JAVA{}_HOME' to exactly JDK version {} or enable Gradle toolchain auto-download.", jdkVersion, jdkVersion)
return false
plugins.withType(JavaPlugin) {
mrjarJavaVersions.each { jdkVersion ->
def task = tasks.create(name: "generateJdkApiJar${jdkVersion}", type: JavaExec) {
description "Regenerate the API-only JAR file with public Panama Foreign & Vector API from JDK ${jdkVersion}"
group "generation"
javaLauncher = javaToolchains.launcherFor {
languageVersion = JavaLanguageVersion.of(jdkVersion)
}
onlyIf {
try {
javaLauncher.get()
return true
} catch (Exception e) {
logger.warn('Launcher for Java {} is not available; skipping regeneration of Panama Foreign & Vector API JAR.', jdkVersion)
logger.warn('Error: {}', e.cause?.message)
logger.warn("Please make sure to point env 'JAVA{}_HOME' to exactly JDK version {} or enable Gradle toolchain auto-download.", jdkVersion, jdkVersion)
return false
}
}
classpath = configurations.apiextractor
mainClass = file("${resources}/ExtractJdkApis.java") as String
systemProperties = [
'user.timezone': 'UTC',
'file.encoding': 'UTF-8',
]
args = [
jdkVersion,
apijars.file("jdk${jdkVersion}.apijar"),
]
}
classpath = configurations.apiextractor
mainClass = file("${resources}/ExtractJdkApis.java") as String
systemProperties = [
'user.timezone': 'UTC',
'file.encoding': 'UTF-8',
]
args = [
jdkVersion,
apijars.file("jdk${jdkVersion}.apijar"),
]
regenerate.dependsOn task
}
regenerate.dependsOn task
}
}

View File

@ -33,18 +33,11 @@ def resources = scriptResources(buildscript)
// Configure different icu4j dependencies.
configure(rootProject) {
configurations {
// icu_xyz
icu_current
}
dependencies {
// icu_xyz "com.ibm.icu:icu4j:xyz"
icu_current 'com.ibm.icu:icu4j'
}
// Exclude explicit ICU configs from palantir's version unification.
versionRecommendations {
// excludeConfigurations "icu_xyz"
icu_current deps.icu4j
}
}

View File

@ -26,7 +26,7 @@ configure(rootProject) {
}
dependencies {
javacc "net.java.dev.javacc:javacc:${scriptDepVersions['javacc']}"
javacc deps.javacc
}
task javacc() {

View File

@ -25,7 +25,7 @@ configure(rootProject) {
}
dependencies {
jflex "de.jflex:jflex:${scriptDepVersions['jflex']}"
jflex deps.jflex
}
}

View File

@ -30,7 +30,7 @@ def recompileDictionary(project, dictionaryName, Closure closure) {
}
configure(project(":lucene:analysis:kuromoji")) {
apply plugin: "de.undercouch.download"
apply plugin: deps.plugins.undercouch.download.get().pluginId
plugins.withType(JavaPlugin) {
ext {

View File

@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
apply plugin: "de.undercouch.download"
apply plugin: deps.plugins.undercouch.download.get().pluginId
def resources = scriptResources(buildscript)

View File

@ -30,7 +30,7 @@ def recompileDictionary(project, dictionaryName, Closure closure) {
}
configure(project(":lucene:analysis:nori")) {
apply plugin: "de.undercouch.download"
apply plugin: deps.plugins.undercouch.download.get().pluginId
plugins.withType(JavaPlugin) {
ext {

View File

@ -1,7 +1,5 @@
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import org.apache.commons.codec.digest.DigestUtils
import java.util.function.Function
/*
@ -58,7 +56,7 @@ def computeChecksummedEntries = { Task sourceTask ->
allFiles.files.forEach { file ->
allEntries.put(
sourceTask.project.rootDir.relativePath(file),
file.exists() ? new DigestUtils(DigestUtils.sha1Digest).digestAsHex(file).trim() : "--")
file.exists() ? buildinfra.sha1Digest().digestAsHex(file).trim() : "--")
}
return allEntries

View File

@ -19,7 +19,7 @@ import org.apache.tools.ant.taskdefs.condition.Os
def resources = scriptResources(buildscript)
apply plugin: "de.undercouch.download"
apply plugin: deps.plugins.undercouch.download.get().pluginId
configure(project(":lucene:analysis:common")) {
ext {

View File

@ -27,7 +27,7 @@ allprojects {
// Artifacts will have names after full gradle project path
// so :solr:core will have solr-core.jar, etc.
project.archivesBaseName = project.path.replaceAll("^:", "").replace(':', '-')
project.base.archivesName = project.path.replaceAll("^:", "").replace(':', '-')
ext {
// Utility method to support passing overrides via -P or -D.
@ -59,12 +59,6 @@ allprojects {
return propertyOrDefault(propName, envOrDefault(envName, defValue));
}
// Locate script-relative resource folder. This is context-sensitive so pass
// the right buildscript (top-level).
scriptResources = { buildscript ->
return file(buildscript.sourceFile.absolutePath.replaceAll('.gradle$', ""))
}
// Utility function similar to project.exec but not emitting
// any output unless an error code is returned from the executed command.
quietExec = { closure ->

View File

@ -20,7 +20,11 @@ allprojects {
tasks.withType(AbstractArchiveTask).configureEach { task ->
duplicatesStrategy = DuplicatesStrategy.FAIL
reproducibleFileOrder = true
dirMode = 0755
fileMode = 0644
dirPermissions {
it.unix(0755)
}
filePermissions {
it.unix(0644)
}
}
}

View File

@ -22,48 +22,49 @@ import org.gradle.plugins.ide.eclipse.model.ClasspathEntry
def resources = scriptResources(buildscript)
configure(rootProject) {
apply plugin: "eclipse"
plugins.withType(JavaPlugin) {
apply plugin: "eclipse"
def eclipseJavaVersion = propertyOrDefault("eclipse.javaVersion", rootProject.minJavaVersion)
def relativize = { other -> rootProject.rootDir.relativePath(other).toString() }
def eclipseJavaVersion = propertyOrDefault("eclipse.javaVersion", rootProject.minJavaVersion)
def relativize = { other -> rootProject.rootDir.relativePath(other).toString() }
eclipse {
project {
name = "Apache Lucene ${version}"
}
eclipse {
project {
name = "Apache Lucene ${version}"
}
classpath {
defaultOutputDir = file('build/eclipse')
classpath {
defaultOutputDir = file('build/eclipse')
file {
beforeMerged { classpath -> classpath.entries.removeAll { it.kind == "src" } }
file {
beforeMerged { classpath -> classpath.entries.removeAll { it.kind == "src" } }
whenMerged { classpath ->
def projects = allprojects.findAll { prj ->
return prj.plugins.hasPlugin(JavaPlugin)
}
Set<String> sourceSetNames = ['main', 'test', "main${eclipseJavaVersion}" as String, "test${eclipseJavaVersion}" as String, 'tools'] as Set
Set<String> sources = []
Set<File> jars = []
projects.each { prj ->
prj.sourceSets.each { sourceSet ->
if (sourceSetNames.contains(sourceSet.name)) {
sources += sourceSet.java.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
sources += sourceSet.resources.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
}
whenMerged { classpath ->
def projects = allprojects.findAll { prj ->
return prj.plugins.hasPlugin(JavaPlugin)
}
// This is hacky - we take the resolved compile classpath and just
// include JAR files from there. We should probably make it smarter
// by looking at real dependencies. But then: this Eclipse configuration
// doesn't really separate sources anyway so why bother.
jars += prj.configurations.compileClasspath.resolve()
jars += prj.configurations.testCompileClasspath.resolve()
}
Set<String> sourceSetNames = ['main', 'test', "main${eclipseJavaVersion}" as String, "test${eclipseJavaVersion}" as String, 'tools'] as Set
Set<String> sources = []
Set<File> jars = []
projects.each { prj ->
prj.sourceSets.each { sourceSet ->
if (sourceSetNames.contains(sourceSet.name)) {
sources += sourceSet.java.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
sources += sourceSet.resources.srcDirs.findAll { dir -> dir.exists() }.collect { dir -> relativize(dir) }
}
}
classpath.entries += sources.sort().collect { name ->
def sourceFolder = new SourceFolder(name, "build/eclipse/" + name)
// This is hacky - we take the resolved compile classpath and just
// include JAR files from there. We should probably make it smarter
// by looking at real dependencies. But then: this Eclipse configuration
// doesn't really separate sources anyway so why bother.
jars += prj.configurations.compileClasspath.resolve()
jars += prj.configurations.testCompileClasspath.resolve()
}
classpath.entries += sources.sort().collect { name ->
def sourceFolder = new SourceFolder(name, "build/eclipse/" + name)
sourceFolder.setExcludes(["module-info.java"])
return sourceFolder
}
@ -81,36 +82,38 @@ configure(rootProject) {
}
}
task luceneEclipseJdt(type: Sync) {
def errorMode = project.propertyOrDefault('eclipse.errors','warning');
def ecjLintFile = rootProject.file('gradle/validation/ecj-lint/ecj.javadocs.prefs');
description = 'Generates the Eclipse JDT settings file.'
inputs.file(ecjLintFile)
inputs.property('errorMode', errorMode)
inputs.property('eclipseJavaVersion', eclipseJavaVersion as String)
from rootProject.file("${resources}/dot.settings")
into rootProject.file(".settings")
filter(ReplaceTokens, tokens: [
'ecj-lint-config': ecjLintFile.getText('UTF-8').replaceAll(/=error\b/, '=' + errorMode)
])
filteringCharset = 'UTF-8'
doLast {
logger.lifecycle('Eclipse config for Java {} written with ECJ errors configured as {}. Change by passing -Peclipse.errors=ignore/warning/error.', eclipseJavaVersion, errorMode)
logger.lifecycle('To edit classes of MR-JARs for a specific Java version, use e.g., -Peclipse.javaVersion=19')
task luceneEclipseJdt(type: Sync) {
def errorMode = project.propertyOrDefault('eclipse.errors' ,'warning');
def ecjLintFile = rootProject.file('gradle/validation/ecj-lint/ecj.javadocs.prefs');
description = 'Generates the Eclipse JDT settings file.'
inputs.file(ecjLintFile)
inputs.property('errorMode', errorMode)
inputs.property('eclipseJavaVersion', eclipseJavaVersion as String)
from rootProject.file("${resources}/dot.settings")
into rootProject.file(".settings")
filter(ReplaceTokens, tokens: [
'ecj-lint-config': ecjLintFile.getText('UTF-8').replaceAll(/=error\b/, '=' + errorMode)
])
filteringCharset = 'UTF-8'
doLast {
logger.lifecycle('Eclipse config for Java {} written with ECJ errors configured as {}. Change by passing -Peclipse.errors=ignore/warning/error.', eclipseJavaVersion, errorMode)
logger.lifecycle('To edit classes of MR-JARs for a specific Java version, use e.g., -Peclipse.javaVersion=19')
}
}
eclipseJdt {
enabled = false
dependsOn 'luceneEclipse'
}
eclipseClasspath {
inputs.property('eclipseJavaVersion', eclipseJavaVersion as String
)
}
}
eclipseJdt {
enabled = false
dependsOn 'luceneEclipseJdt'
}
eclipseClasspath {
inputs.property('eclipseJavaVersion', eclipseJavaVersion as String)
}
}
@ -131,6 +134,6 @@ public class LibEntry implements ClasspathEntry {
node.appendNode("classpathentry", Map.of(
"kind", "lib",
"path", path
));
))
}
}

View File

@ -49,7 +49,7 @@ configure(rootProject.ext.mavenProjects) { Project project ->
// This moves pom metadata configuration after all the scripts of all projects
// have been evaluated. This is required because we set artifact groups
// and archivesBaseName in other scripts and some of the properties below don't
// and archivesName in other scripts and some of the properties below don't
// accept lazy property providers (so everything must be in its final form).
gradle.projectsEvaluated {
publishing {
@ -57,22 +57,10 @@ configure(rootProject.ext.mavenProjects) { Project project ->
configure(publication) {
from components.java
groupId = project.group
artifactId = project.archivesBaseName
artifactId = project.base.archivesName.get()
artifact sourcesJar
artifact javadocJar
// LUCENE-9561:
// Remove dependencyManagement section created by a combination of
// Palantir and the publishing plugin.
//
// https://github.com/palantir/gradle-consistent-versions/issues/550
pom({
withXml {
def dm = asNode().dependencyManagement
if (dm) dm.replaceNode {}
}
})
}
}
}

View File

@ -104,3 +104,6 @@ org.gradle.java.installations.auto-download=true
# Set these to enable automatic JVM location discovery.
org.gradle.java.installations.fromEnv=JAVA21_HOME,JAVA22_HOME,RUNTIME_JAVA_HOME
#org.gradle.java.installations.paths=(custom paths)
# Opt out of the Gradle Enterprise build scan plugin entirely.
# gradle.ge=false

View File

@ -18,7 +18,6 @@
import org.apache.tools.ant.taskdefs.condition.Os
import org.apache.tools.ant.types.Commandline
import org.gradle.api.tasks.testing.logging.*
import org.apache.lucene.gradle.ErrorReportingTestListener
def resources = scriptResources(buildscript)
def verboseModeHookInstalled = false
@ -201,7 +200,7 @@ allprojects {
}
def spillDir = getTemporaryDir().toPath()
def listener = new ErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode)
def listener = buildinfra.newErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode)
addTestOutputListener(listener)
addTestListener(listener)

View File

@ -15,8 +15,6 @@
* limitations under the License.
*/
import org.apache.lucene.gradle.ErrorReportingTestListener
// Display all failed tests at the end of the build.
def failedTests = []
@ -28,7 +26,7 @@ allprojects {
failedTests << [
"name": "${desc.className}.${desc.name}",
"project": "${test.project.path}",
"output": file("${task.testOutputsDir}/${ErrorReportingTestListener.getOutputLogName(desc.parent)}"),
"output": file("${task.testOutputsDir}/${buildinfra.getOutputLogName(desc.parent)}"),
"reproduce": "gradlew ${project.path}:test --tests \"${desc.className}.${desc.name}\" ${task.project.testOptionsForReproduceLine}"
]
}
@ -39,7 +37,7 @@ allprojects {
failedTests << [
"name": "${desc.name}",
"project": "${test.project.path}",
"output": file("${task.testOutputsDir}/${ErrorReportingTestListener.getOutputLogName(desc)}"),
"output": file("${task.testOutputsDir}/${buildinfra.getOutputLogName(desc)}"),
"reproduce": "gradlew ${project.path}:test --tests \"${desc.name}\" ${task.project.testOptionsForReproduceLine}"
]
}

View File

@ -30,7 +30,7 @@ buildscript {
}
dependencies {
classpath 'com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.7.2'
classpath deps.randomizedtesting.runner
}
}
@ -126,10 +126,10 @@ allprojects {
secManagerExclusions
}
dependencies {
secManagerExclusions ( "com.carrotsearch.randomizedtesting:randomizedtesting-runner", {
secManagerExclusions ( deps.randomizedtesting.runner, {
exclude group: "junit"
})
secManagerExclusions ( "junit:junit", {
secManagerExclusions ( deps.junit, {
exclude group: "org.hamcrest"
})
}

View File

@ -22,7 +22,7 @@ import org.gradle.util.GradleVersion
configure(rootProject) {
ext {
expectedGradleVersion = '8.8'
expectedGradleVersion = deps.versions.minGradle.get()
hasJavaFlightRecorder = ModuleLayer.boot().findModule('jdk.jfr').map(this.class.module::canRead).orElse(false)
}
@ -32,6 +32,7 @@ configure(rootProject) {
}
def currentJavaVersion = JavaVersion.current()
def minJavaVersion = JavaVersion.toVersion(deps.versions.minJava.get())
if (currentJavaVersion < minJavaVersion) {
throw new GradleException("At least Java ${minJavaVersion} is required, you are running Java ${currentJavaVersion} "
+ "[${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]")

View File

@ -0,0 +1,89 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Configure sanity check for conflicting dependencies across certain configurations
allprojects {
// Plugin that provides the 'dependencyVersionChecks' extension and the
// lock-file verification tasks (e.g. checkLocks/writeLocks).
apply plugin: deps.plugins.dependencychecks.get().pluginId

// Resolvable configurations for production code; locked as one group so
// that compile and runtime classpaths stay version-consistent.
def mainConfigurations = project.configurations.matching {
it.name in [
"compileClasspath",
"runtimeClasspath"
]
}

// Test-only configurations, locked as a separate group from main code.
def testConfigurations = project.configurations.matching {
it.name in [
"annotationProcessor",
"testCompileClasspath",
"testRuntimeClasspath"
]
}

dependencyVersionChecks {
// Header comment written into the generated lock file.
lockFileComment = "An inventory of resolved dependency versions. Do not edit this file directly."

configurationGroups {
main_dependencies {
include mainConfigurations
}
test_dependencies {
include testConfigurations
}
}
}

dependencies {
constraints {
// Placeholder for pinning versions when conflicting transitive
// dependencies appear across the main configurations.
mainConfigurations.configureEach { Configuration conf ->
// no resolutions for conflicting dependencies at the moment.
}
}
}
}
// Configure version catalog cleanups plugin.
configure(rootProject) {
// Plugin that can sort/format and interactively update the version catalog.
apply plugin: deps.plugins.versionCatalogUpdate.get().pluginId

versionCatalogUpdate {
// Keep catalog entries lexicographically sorted for stable diffs.
sortByKey = true

versionCatalogs {
// The 'deps' catalog lives in versions.toml at the root.
deps {
catalogFile = file("versions.toml")
}
}
}

// Reformatting the catalog is part of the project-wide 'tidy' task.
tasks.matching { it.name == "tidy" }.configureEach {
it.dependsOn(":versionCatalogFormatDeps")
}

// Run the catalog update interactively so version bumps are reviewed
// rather than applied blindly.
tasks.matching {
it.path in [
":versionCatalogUpdateDeps"
]
}.configureEach {
it.interactive = true
}

// Convenience alias for updating dependency versions in the catalog.
tasks.register("updateDeps", {
dependsOn ":versionCatalogUpdateDeps"
})
}

View File

@ -23,7 +23,7 @@ configure(rootProject) {
}
dependencies {
ecjDeps "org.eclipse.jdt:ecj:${scriptDepVersions['ecj']}"
ecjDeps deps.ecj
}
}

View File

@ -37,24 +37,25 @@ if (skipReason) {
allprojects { prj ->
plugins.withType(JavaPlugin) {
// LUCENE-9650: Errorprone on master/gradle does not work when running as plugin
// inside a forked Javac process. Javac running inside Gradle works, because we have
// additional module system opens in place.
// This is a hack to keep the dependency (so that palantir's version check doesn't complain)
// but don't include the plugin (which fails on JDK16+).
// LUCENE-9650: Errorprone does not work when running as a plugin inside a forked Javac process.
// Javac running inside Gradle works, because we have additional module system opens in place.
if (skipReason) {
tasks.withType(JavaCompile) { task -> task.dependsOn ":errorProneSkipped" }
// Error prone plugin adds error prone to test classpath. We need to add it here too (manually) so that
// versions.lock is consistent with or without error prone.
configurations {
errorprone
}
dependencies {
errorprone("com.google.errorprone:error_prone_core")
errorprone deps.errorprone
}
configurations.annotationProcessor.extendsFrom(configurations.errorprone)
} else {
prj.apply plugin: 'net.ltgt.errorprone'
prj.apply plugin: deps.plugins.errorprone.get().pluginId
dependencies {
errorprone("com.google.errorprone:error_prone_core")
errorprone deps.errorprone
}
tasks.withType(JavaCompile) { task ->

View File

@ -1,4 +1,4 @@
/*
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
@ -57,7 +57,7 @@ allprojects { prj ->
}
// Configure defaults for sourceSets.main
tasks.matching { it.name ==~ /forbiddenApisMain\d*/ }.all {
tasks.matching { it.name ==~ /forbiddenApisMain\d*/ }.configureEach {
bundledSignatures += [
'jdk-unsafe',
'jdk-deprecated',
@ -76,12 +76,12 @@ allprojects { prj ->
// Configure defaults for the MR-JAR feature sourceSets by setting java version and ignore missing classes
// TODO:
// - Get hold of warning messages, see https://github.com/policeman-tools/forbidden-apis/issues/207
tasks.matching { it.name ==~ /forbiddenApisMain\d+/ }.all {
tasks.matching { it.name ==~ /forbiddenApisMain\d+/ }.configureEach {
failOnMissingClasses = false
}
// Configure defaults for sourceSets.test
tasks.matching { it.name in ["forbiddenApisTest", "forbiddenApisTestFixtures"] }.all {
tasks.matching { it.name in ["forbiddenApisTest", "forbiddenApisTestFixtures"] }.configureEach {
bundledSignatures += [
'jdk-unsafe',
'jdk-deprecated',
@ -105,7 +105,7 @@ allprojects { prj ->
}
// Configure defaults for sourceSets.tools (if present).
tasks.matching { it.name == "forbiddenApisTools" }.all {
tasks.matching { it.name == "forbiddenApisTools" }.configureEach {
bundledSignatures += [
'jdk-unsafe',
'jdk-deprecated',
@ -129,12 +129,24 @@ allprojects { prj ->
//
// This is the simplest workaround possible: just point at all the rule files and indicate
// them as inputs. This way if a rule is modified, checks will be reapplied.
configure(tasks.matching { it.name.startsWith("forbiddenApis") }) { task ->
tasks.matching { it.name.startsWith("forbiddenApis") }.configureEach { task ->
task.inputs.dir(file(resources))
}
// Disable sysout signatures for these projects.
if (prj.path in [
if (prj.name in ["missing-doclet", "build-infra"]) {
forbiddenApisMain.bundledSignatures -= [
'jdk-non-portable',
'jdk-system-out'
]
forbiddenApisMain.exclude("**/Checksum*")
forbiddenApisMain.suppressAnnotations += [
"**.*SuppressForbidden"
]
}
if (prj.name in ["missing-doclet"] || prj.path in [
":lucene:demo",
":lucene:benchmark",
":lucene:test-framework"

View File

@ -33,7 +33,7 @@ buildscript {
}
dependencies {
classpath "org.eclipse.jgit:org.eclipse.jgit:${scriptDepVersions['jgit']}"
classpath deps.jgit
}
}

View File

@ -20,8 +20,6 @@
// 2) notice file
// 3) checksum validation/ generation.
import org.apache.commons.codec.digest.DigestUtils
// This should be false only for debugging.
def failOnError = true
@ -136,7 +134,7 @@ subprojects {
jarName : file.toPath().getFileName().toString(),
path : file,
module : resolvedArtifact.moduleVersion,
checksum : provider { new DigestUtils(DigestUtils.sha1Digest).digestAsHex(file).trim() },
checksum : provider { buildinfra.sha1Digest().digestAsHex(file).trim() },
// We keep track of the files referenced by this dependency (sha, license, notice, etc.)
// so that we can determine unused dangling files later on.
referencedFiles: []

View File

@ -23,8 +23,7 @@ configure(rootProject) {
description = "All precommit checks"
// Root-level validation tasks.
dependsOn ":verifyLocks"
dependsOn ":versionsPropsAreSorted"
dependsOn ":checkLocks"
dependsOn ":checkWorkingCopyClean"
}

View File

@ -18,22 +18,23 @@
import groovy.xml.NamespaceBuilder
// Configure rat dependencies for use in the custom task.
configure(rootProject) {
// Configure the rat validation task and all scanned directories.
allprojects {
configurations {
ratDeps
}
dependencies {
ratDeps "org.apache.rat:apache-rat:${scriptDepVersions['apache-rat']}"
ratDeps deps.rat
}
}
// Configure the rat validation task and all scanned directories.
allprojects {
task("rat", type: RatTask) {
tasks.register("rat", RatTask).configure {
group = 'Verification'
description = 'Runs Apache Rat checks.'
dependsOn configurations.ratDeps
def defaultScanFileTree = project.fileTree(projectDir, {
// Don't check under the project's build folder.
exclude project.buildDir.name
@ -78,10 +79,10 @@ allprojects {
// Exclude github stuff (templates, workflows).
exclude ".github"
// The root project also includes patterns for the boostrap (buildSrc) and composite
// The root project also includes patterns for the included composite
// projects. Include their sources in the scan.
include "buildSrc/src/**"
include "dev-tools/missing-doclet/src/**"
include "build-tools/build-infra/src/**"
include "build-tools/missing-doclet/src/**"
// do not let RAT attempt to scan a python venv, it gets lost and confused...
exclude "dev-tools/aws-jmh/build/**"
@ -142,7 +143,7 @@ class RatTask extends DefaultTask {
def generateReport(File reportFile) {
// Set up ant rat task.
def ratClasspath = project.rootProject.configurations.ratDeps.asPath
def ratClasspath = project.configurations.ratDeps.asPath
ant.setLifecycleLogLevel(AntBuilder.AntMessagePriority.ERROR)
ant.taskdef(resource: 'org/apache/rat/anttasks/antlib.xml', classpath: ratClasspath)

View File

@ -20,9 +20,9 @@
* spotless and Google Java Format.
*/
def resources = scriptResources(buildscript)
// def resources = scriptResources(buildscript)
configure(project(":lucene").subprojects) { prj ->
configure(allprojects) { prj ->
plugins.withType(JavaPlugin) {
prj.apply plugin: 'com.diffplug.spotless'
@ -36,7 +36,7 @@ configure(project(":lucene").subprojects) { prj ->
lineEndings 'UNIX'
endWithNewline()
googleJavaFormat('1.18.1')
googleJavaFormat(deps.versions.googleJavaFormat.get())
// Apply to all Java sources
target "src/**/*.java"
@ -100,23 +100,19 @@ configure(project(":lucene").subprojects) { prj ->
// Emit a custom message about how to fix formatting errors.
tasks.matching { task -> task.name == "spotlessJavaCheck" }.configureEach {
runToFixMessage.set("\nIMPORTANT: run the top-level './gradlew tidy' to format code automatically (see help/formatting.txt for more info).")
it.runToFixMessage.set("\nIMPORTANT: run the top-level './gradlew tidy' to format code automatically (see help/formatting.txt for more info).")
}
// Add an alias to 'spotlessApply' simply called 'tidy' and wire up
// spotlessCheck to convention's check.
task tidy() {
description "Applies formatters and cleanups to sources."
group "verification"
// Hook up spotless to tidy and check tasks.
tasks.matching { it.name == "tidy" }.configureEach { v ->
v.dependsOn tasks.matching { it.name == "spotlessApply" }
}
tasks.matching { task -> task.name == "spotlessApply" }.configureEach { v ->
tidy.dependsOn v
v.dependsOn ":checkJdkInternalsExportedToGradle"
tasks.matching { it.name == "check" }.configureEach { v ->
v.dependsOn tasks.matching { it.name == "spotlessCheck" }
}
tasks.matching { task -> task.name == "spotlessCheck" }.configureEach { v ->
check.dependsOn v
tasks.matching { task -> task.name in ["spotlessApply", "spotlessCheck"] }.configureEach { v ->
v.dependsOn ":checkJdkInternalsExportedToGradle"
}
}

View File

@ -33,7 +33,7 @@ buildscript {
}
dependencies {
classpath "org.apache.rat:apache-rat:${scriptDepVersions['apache-rat']}"
classpath deps.rat
}
}

View File

@ -1,34 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This ensures 'versions.props' file is sorted lexicographically.
configure(rootProject) {
task versionsPropsAreSorted() {
doFirst {
def versionsProps = file('versions.props')
def lines = versionsProps.readLines("UTF-8")
def sorted = lines.toSorted()
if (!Objects.equals(lines, sorted)) {
def sortedFile = file("${buildDir}/versions.props")
sortedFile.write(sorted.join("\n"), "UTF-8")
throw new GradleException("${versionsProps} file is not sorted lexicographically. I wrote a sorted file to ${sortedFile} - please review and commit.")
}
}
}
}

4
gradlew vendored
View File

@ -158,7 +158,7 @@ fi
GRADLE_WRAPPER_JAR="$APP_HOME/gradle/wrapper/gradle-wrapper.jar"
if [ ! -e "$GRADLE_WRAPPER_JAR" ]; then
"$JAVACMD" $JAVA_OPTS "$APP_HOME/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "$GRADLE_WRAPPER_JAR"
"$JAVACMD" $JAVA_OPTS "$APP_HOME/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "$GRADLE_WRAPPER_JAR"
WRAPPER_STATUS=$?
if [ "$WRAPPER_STATUS" -eq 1 ]; then
echo "ERROR: Something went wrong. Make sure you're using Java version of exactly 21."
@ -173,7 +173,7 @@ CLASSPATH=$GRADLE_WRAPPER_JAR
# START OF LUCENE CUSTOMIZATION
# Generate gradle.properties if they don't exist
if [ ! -e "$APP_HOME/gradle.properties" ]; then
"$JAVACMD" $JAVA_OPTS "$APP_HOME/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "$APP_HOME/gradle/template.gradle.properties" "$APP_HOME/gradle.properties"
"$JAVACMD" $JAVA_OPTS "$APP_HOME/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "$APP_HOME/gradle/template.gradle.properties" "$APP_HOME/gradle.properties"
GENERATOR_STATUS=$?
if [ "$GENERATOR_STATUS" -ne 0 ]; then
exit $GENERATOR_STATUS

4
gradlew.bat vendored
View File

@ -76,7 +76,7 @@ goto fail
@rem LUCENE-9266: verify and download the gradle wrapper jar if we don't have one.
set GRADLE_WRAPPER_JAR=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
IF NOT EXIST "%GRADLE_WRAPPER_JAR%" (
"%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "%GRADLE_WRAPPER_JAR%"
"%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java" "%GRADLE_WRAPPER_JAR%"
IF %ERRORLEVEL% EQU 1 goto failWithJvmMessage
IF %ERRORLEVEL% NEQ 0 goto fail
)
@ -89,7 +89,7 @@ set CLASSPATH=%GRADLE_WRAPPER_JAR%
IF NOT EXIST "%APP_HOME%\gradle.properties" (
@rem local expansion is needed to check ERRORLEVEL inside control blocks.
setlocal enableDelayedExpansion
"%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/buildSrc/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "%APP_HOME%\gradle\template.gradle.properties" "%APP_HOME%\gradle.properties"
"%JAVA_EXE%" %JAVA_OPTS% "%APP_HOME%/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/GradlePropertiesGenerator.java" "%APP_HOME%\gradle\template.gradle.properties" "%APP_HOME%\gradle.properties"
IF %ERRORLEVEL% NEQ 0 goto fail
endlocal
)

View File

@ -7,81 +7,79 @@ and each configuration can have dependencies attached to it.
There are some standard conventions so, for example, the Java plugin
adds standard configurations such as "api", "implementation",
"testImplementation" and others. These configurations can also inherit
from each other; more about this typic can be found here:
from each other; more about this topic can be found here:
https://docs.gradle.org/current/userguide/dependency_management_for_java_projects.html#dependency_management_for_java_projects
https://docs.gradle.org/current/userguide/java_library_plugin.html#sec:java_library_separation
https://docs.gradle.org/current/userguide/java_plugin.html#sec:java_plugin_and_dependency_management
Lucene typically uses three configurations and attach project
dependencies to them:
Lucene uses the following configurations and attaches project dependencies
to them:
api - makes a dependency available for main classes, tests and any
moduleApi - makes the dependency available to main classes, tests and any
other modules importing the project (exportable dependency),
implementation - makes a dependency available for main classes, tests
but will *not* export the dependency for other modules (so their
moduleImplementation - makes the dependency available to main classes, tests
but will *not* export the dependency to other modules (so their
compilation classpath won't contain it).
testImplementation - makes a dependency only available for test classes.
moduleTestImplementation - makes the dependency available for test classes only.
The "module" prefix is used to distinguish configurations which apply
to modular builds, compared to the regular classpath-configurations defined
by gradle's java module. Some Lucene modules may define regular classpath
entries to bypass the limitations of the module system (or gradle's).
Adding a library dependency
---------------------------
Lucene dependencies and their versions are managed globally using version
catalogs (in versions.toml) [https://docs.gradle.org/current/userguide/platforms.html].
Let's say we wish to add a dependency on library "foo.bar:baz" in
version 1.2 to :lucene:core. Let's assume this library is only
used internally by the project. The :lucene:core project is configured
by lucene/core/build.gradle and we would add (or modify) the dependency
by lucene/core/build.gradle, so we add (or modify) the dependency
block as follows:
dependencies {
implementation "foo.bar:baz"
moduleImplementation deps.baz
}
The "implementation" here is a named configuration; we don't need to declare
it because it is declared for us by the java-library plugin.
The "moduleImplementation" here is a named configuration explained in the
section above. The "deps.baz" refers to the version catalog named "deps",
in which the dependency "baz" should be declared. If this is the first
reference to this library, then we have to add it to "versions.toml" catalog:
the version goes under the "versions" and module coordinates
under the "libraries" section:
In "normal" gradle the version of the dependency would be present
directly inside the declaration but we use a plugin
(palantir-consistent-versions) to manage all dependency versions
from the top-level (so that conflicts can be resolved globally).
[versions]
baz = "1.2"
...
[libraries]
baz = { module = "foo.bar:baz", version.ref = "baz" }
If this is the first time "foo.bar:baz" is added to the project, we'd have
to add its version to "versions.props" file at the top level of the
checkout:
The version defined in the "versions" section is the preferred version of the library
we wish to use. Finally, run tidy to sort all entries in versions.toml:
foo.bar:baz=1.2
gradlew tidy
and then regenerate the "versions.lock" file using the following
command:
Gradle will try to consolidate different versions across different
configurations to make sure they're compatible and may complain if it encounters
conflicting versions in the dependency tree. We want all dependencies to be consistent,
so we use an additional build plugin to ensure no accidental version changes
occur. Whenever we add or remove dependencies, we have to follow up with lock file
regeneration:
gradlew --write-locks
gradlew writeLocks
git diff versions.*
IMPORTANT: The versions.lock file will contain the actual version
of the dependency picked based on other project dependencies and
their transitive dependencies. This selected version may be
different from what each of these actually requires (the highest
version number will be typically selected). To see which dependencies
require which version of the library use:
IMPORTANT: The versions.lock file will contain a list of actual library versions
and configurations they occurred in.
gradlew why --hash=...
where the hash code comes from versions.lock file. For example, at
the time of writing, jackson-databind has the following entry:
com.fasterxml.jackson.core:jackson-databind:2.10.0 (3 constraints: 931a7796)
and "gradlew why --hash=931a7796" prints:
com.fasterxml.jackson.core:jackson-databind:2.10.0
projects -> 2.10.0
net.thisptr:jackson-jq -> 2.7.0
org.carrot2:carrot2-mini -> 2.9.9.3
Once the dependency is added it always makes sense to see the
tree of all module dependencies and maybe exclude transitive
dependencies of foo.bar:baz that we won't need.
Once a new dependency is added it always makes sense to regenerate the lock file
and look at which dependencies have changed (and why).
Inspecting current dependencies
@ -98,12 +96,12 @@ in just the "publicly visible" and "classpath-visible" configurations.
The publicly visible project dependencies (classes shared by other
modules importing our module) can be displayed with:
gradlew -p lucene\analysis\icu dependencies --configuration api
gradlew -p lucene\analysis\icu dependencies --configuration moduleApi
And the "private" set of dependencies (real classpath) can be dumped
with:
gradlew -p lucene\analysis\icu dependencies --configuration runtimeClasspath
gradlew -p lucene\analysis\icu dependencies --configuration moduleRuntimePath
Excluding a transitive dependency
@ -115,7 +113,7 @@ crucial for the functioning of "foo.bar:baz". We can exclude it
by adding an exclusion block to the original declaration:
dependencies {
implementation("foo.bar:baz", {
implementation(deps.baz, {
exclude group: "foo.bar", module: "irrelevant"
})
}

View File

@ -2,7 +2,7 @@ Code formatting
===============
Starting with (LUCENE-9564) Java code is enforced to comply with
google-java-format conventions. In theory you shouldn't worry about
google-java-format conventions. In theory, you shouldn't worry about
what the convention actually looks like - write the code in any way
you like and then run:
@ -13,7 +13,7 @@ your code so that it complies with the convention and passes gradle
'check' task.
IMPORTANT: There is *no* way to mark sections of the code as excluded
from formatting. This is by design and cannot be altered. In vast
from formatting. This is by design and cannot be altered. In the vast
majority of cases the formatter will do a great job of cleaning up the
code. Occasionally you may want to rewrite the code (introduce a local
variable or reshape code paths) so that it's easier to read after

View File

@ -54,7 +54,7 @@ Signing can be enabled by adding the "-Psign" option, for example:
gradlew assembleRelease mavenToApacheReleases -Psign
By default gradle uses a Java-based implementation of PGP for signing, which requieres
By default, gradle uses a Java-based implementation of PGP for signing, which requires
several "signing.*" properties via either ~/.gradle/gradle.properties or command-line options:
https://docs.gradle.org/current/userguide/signing_plugin.html#sec:signatory_credentials
@ -92,9 +92,9 @@ signing.gnupg.passphrase=... # Provide your passphrase to
If in doubt, consult gradle's signing plugin documentation:
https://docs.gradle.org/current/userguide/signing_plugin.html#sec:using_gpg_agent
"signing.gnupg.passphrase" is not recomended because there is no advantage to using an external GPG process if you use it. If you
are comfortable giving gradle your passphrase, then there is no reason to use an external GPG process via '-PuseGpg'. Just use the
"signing.*" options described previuosly to let gradle deal with your key directly.
"signing.gnupg.passphrase" is not recommended because there is no advantage to using an external GPG process if you use it.
If you are comfortable giving gradle your passphrase, then there is no reason to use an external GPG process via '-PuseGpg'.
Just use the "signing.*" options described previously to let gradle deal with your key directly.
Because of how Gradle's signing plugin invokes GPG, using an external GPG process *only* works if your GPG configuration uses a
GPG agent (required by gpg2) and if the "pinentry" for your GPG agent does not require access to the tty to prompt you for a password.

View File

@ -23,7 +23,7 @@ dependencies {
moduleApi project(':lucene:core')
moduleApi project(':lucene:analysis:common')
moduleApi 'com.ibm.icu:icu4j'
moduleApi deps.icu4j
moduleTestImplementation project(':lucene:test-framework')
}

View File

@ -22,10 +22,10 @@ description = 'Analyzer for dictionary stemming, built-in Polish dictionary'
dependencies {
moduleApi project(':lucene:core')
moduleApi project(':lucene:analysis:common')
moduleApi 'org.carrot2:morfologik-stemming'
moduleApi deps.morfologik.stemming
moduleImplementation 'org.carrot2:morfologik-polish'
moduleImplementation 'ua.net.nlp:morfologik-ukrainian-search'
moduleImplementation deps.morfologik.polish
moduleImplementation deps.morfologik.ukrainian
moduleTestImplementation project(':lucene:test-framework')
}

View File

@ -22,7 +22,7 @@ description = 'OpenNLP Library Integration'
dependencies {
moduleApi project(':lucene:core')
moduleApi project(':lucene:analysis:common')
moduleApi 'org.apache.opennlp:opennlp-tools'
moduleApi deps.opennlp.tools
moduleTestImplementation project(':lucene:test-framework')
}

View File

@ -23,7 +23,7 @@ dependencies {
moduleApi project(':lucene:core')
moduleApi project(':lucene:analysis:common')
moduleApi 'commons-codec:commons-codec'
moduleApi deps.commons.codec
moduleTestImplementation project(':lucene:test-framework')
}

View File

@ -25,8 +25,8 @@ dependencies {
moduleImplementation project(':lucene:core')
moduleImplementation project(':lucene:expressions')
moduleImplementation "org.openjdk.jmh:jmh-core:1.37"
annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:1.37"
moduleImplementation deps.jmh.core
annotationProcessor deps.jmh.annprocess
}

View File

@ -31,17 +31,17 @@ dependencies {
moduleImplementation project(':lucene:spatial-extras')
moduleImplementation project(':lucene:queryparser')
moduleImplementation "org.apache.commons:commons-compress"
moduleImplementation "com.ibm.icu:icu4j"
moduleImplementation "org.locationtech.spatial4j:spatial4j"
moduleImplementation ("net.sourceforge.nekohtml:nekohtml", {
moduleImplementation deps.commons.compress
moduleImplementation deps.icu4j
moduleImplementation deps.spatial4j
moduleImplementation(deps.nekohtml, {
exclude module: "xml-apis"
// LUCENE-10337: Exclude xercesImpl from module path because it has split packages with the JDK (!)
exclude module: "xercesImpl"
})
// LUCENE-10337: Include xercesImpl on regular classpath where it won't cause conflicts.
implementation ("xerces:xercesImpl", {
implementation (deps.xerces, {
exclude module: "xml-apis"
})

View File

@ -29,16 +29,16 @@ configurations {
dependencies {
binaryDistribution project(path: ":lucene:distribution", configuration: "binaryDirForTests")
moduleTestImplementation "com.carrotsearch:procfork"
moduleTestImplementation("com.carrotsearch.randomizedtesting:randomizedtesting-runner", {
moduleTestImplementation deps.procfork
moduleTestImplementation(deps.randomizedtesting.runner, {
exclude group: "junit"
})
moduleTestImplementation("junit:junit", {
moduleTestImplementation(deps.junit, {
exclude group: "org.hamcrest"
})
moduleTestImplementation "org.hamcrest:hamcrest"
moduleTestImplementation "org.assertj:assertj-core"
moduleTestImplementation deps.hamcrest
moduleTestImplementation deps.assertj
}
test {

View File

@ -15,8 +15,6 @@
* limitations under the License.
*/
import org.apache.lucene.gradle.Checksum
import java.nio.charset.StandardCharsets
import java.nio.file.Files
@ -60,9 +58,7 @@ dependencies {
// Compute checksums for release archives.
task computeChecksums(type: Checksum) {
algorithm = Checksum.Algorithm.SHA512
task computeChecksums(type: buildinfra.checksumClass()) {
files = objects.fileCollection()
[
tasks.assembleSourceTgz,

View File

@ -24,10 +24,10 @@ dependencies {
moduleImplementation project(':lucene:codecs')
moduleImplementation 'org.antlr:antlr4-runtime'
moduleImplementation deps.antlr.runtime
moduleImplementation 'org.ow2.asm:asm'
moduleImplementation 'org.ow2.asm:asm-commons'
moduleImplementation deps.asm.core
moduleImplementation deps.asm.commons
moduleTestImplementation project(':lucene:test-framework')
}

View File

@ -21,7 +21,7 @@ apply plugin: 'java-library'
description = 'Luke - Lucene Toolbox'
ext {
standaloneDistDir = file("$buildDir/${archivesBaseName}-${project.version}")
standaloneDistDir = file("$buildDir/${project.base.archivesName.get()}-${project.version}")
}
dependencies {
@ -72,7 +72,7 @@ tasks.withType(ProcessResources).configureEach { task ->
task standaloneJar(type: Jar) {
dependsOn classes
archiveFileName = "${archivesBaseName}-${project.version}-standalone.jar"
archiveFileName = "${project.base.archivesName.get()}-${project.version}-standalone.jar"
from(sourceSets.main.output)
@ -127,10 +127,10 @@ assemble.dependsOn standaloneAssemble
task standalonePackage(type: Tar) {
from standaloneAssemble
into "${archivesBaseName}-${project.version}/"
into "${project.base.archivesName.get()}-${project.version}/"
compression = Compression.GZIP
archiveFileName = "${archivesBaseName}-${project.version}-standalone.tgz"
archiveFileName = "${project.base.archivesName.get()}-${project.version}-standalone.tgz"
}
// Utility to launch Luke (and fork it from the build).

View File

@ -27,18 +27,18 @@ dependencies {
moduleApi project(':lucene:core')
moduleApi project(':lucene:spatial3d')
moduleApi 'org.locationtech.spatial4j:spatial4j'
moduleApi 'io.sgr:s2-geometry-library-java'
moduleApi deps.spatial4j
moduleApi deps.s2.geometry
moduleTestImplementation project(':lucene:test-framework')
moduleTestImplementation project(':lucene:spatial-test-fixtures')
moduleTestImplementation 'org.locationtech.jts:jts-core'
moduleTestImplementation deps.jts
// We add patched modules to this configuration because otherwise IDEs would not see the
// dependency at all, even in classpath mode (they don't see --patch-module commands we
// add to the compiler and test tasks).
moduleTestPatchOnly 'org.locationtech.spatial4j:spatial4j::tests'
spatial4jTestPatch 'org.locationtech.spatial4j:spatial4j::tests'
moduleTestPatchOnly(variantOf(deps.spatial4j) { classifier("tests") })
spatial4jTestPatch(variantOf(deps.spatial4j) { classifier("tests") })
}
sourceSets.test.extensions.configure("modularPaths", {

View File

@ -22,13 +22,13 @@ description = 'Framework for testing Lucene-based applications'
dependencies {
moduleApi project(':lucene:core')
moduleApi ("com.carrotsearch.randomizedtesting:randomizedtesting-runner", {
moduleApi (deps.randomizedtesting.runner, {
exclude group: "junit"
})
moduleApi ("junit:junit", {
moduleApi (deps.junit, {
exclude group: "org.hamcrest"
})
moduleApi ('org.hamcrest:hamcrest')
moduleApi deps.hamcrest
moduleImplementation project(':lucene:codecs')
}

View File

@ -20,19 +20,31 @@ pluginManagement {
mavenCentral()
gradlePluginPortal()
}
includeBuild("build-tools/build-infra")
}
plugins {
id("org.gradle.toolchains.foojay-resolver-convention") version "0.7.0"
id "org.gradle.toolchains.foojay-resolver-convention" version "0.8.0"
id 'com.gradle.enterprise' version '3.15.1'
id 'com.gradle.common-custom-user-data-gradle-plugin' version '1.11.3'
}
apply from: file('gradle/ge.gradle')
dependencyResolutionManagement {
versionCatalogs {
deps {
from(files('versions.toml'))
}
}
}
if (Boolean.parseBoolean(providers.gradleProperty("gradle.ge").orElse("true").get())) {
apply from: file('gradle/ge.gradle')
}
rootProject.name = "lucene-root"
includeBuild("dev-tools/missing-doclet")
includeBuild("build-tools/missing-doclet")
include "lucene:analysis:common"
include "lucene:analysis:icu"

View File

@ -1,29 +1,935 @@
# Run ./gradlew --write-locks to regenerate this file
com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.8.1 (1 constraints: 0d050e36)
com.ibm.icu:icu4j:74.2 (1 constraints: e1041731)
commons-codec:commons-codec:1.13 (1 constraints: d904f430)
io.sgr:s2-geometry-library-java:1.0.0 (1 constraints: 0305f035)
junit:junit:4.13.1 (1 constraints: 3b05453b)
net.sf.jopt-simple:jopt-simple:5.0.4 (1 constraints: be0ad6cc)
net.sourceforge.nekohtml:nekohtml:1.9.17 (1 constraints: 4405503b)
org.antlr:antlr4-runtime:4.11.1 (1 constraints: 39053f3b)
org.apache.commons:commons-compress:1.19 (1 constraints: df04fa30)
org.apache.commons:commons-math3:3.6.1 (1 constraints: bf0adbcc)
org.apache.opennlp:opennlp-tools:2.3.2 (1 constraints: 09050036)
org.carrot2:morfologik-fsa:2.1.9 (1 constraints: db0d9c36)
org.carrot2:morfologik-polish:2.1.9 (1 constraints: 0e050136)
org.carrot2:morfologik-stemming:2.1.9 (2 constraints: 1312040d)
org.hamcrest:hamcrest:2.2 (1 constraints: a8041f2c)
org.locationtech.spatial4j:spatial4j:0.8 (1 constraints: ac041f2c)
org.openjdk.jmh:jmh-core:1.37 (1 constraints: df04fc30)
org.ow2.asm:asm:9.6 (3 constraints: 3917ef6d)
org.ow2.asm:asm-commons:9.6 (1 constraints: b304382c)
org.ow2.asm:asm-tree:9.6 (1 constraints: ea09e3a5)
org.slf4j:slf4j-api:1.7.36 (1 constraints: 6f0ed053)
ua.net.nlp:morfologik-ukrainian-search:4.9.1 (1 constraints: 10051b36)
xerces:xercesImpl:2.12.0 (1 constraints: 3705353b)
[Test dependencies]
com.carrotsearch:procfork:1.0.6 (1 constraints: 0905f635)
org.assertj:assertj-core:3.21.0 (1 constraints: 38053c3b)
org.locationtech.jts:jts-core:1.17.0 (1 constraints: 3b053e3b)
{
"comment" : "An inventory of resolved dependency versions. Do not edit this file directly.",
"configurationGroups" : {
"main_dependencies" : {
"com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.8.1" : "fa9ef26b,refs=4",
"com.ibm.icu:icu4j:74.2" : "47ea4550,refs=6",
"commons-codec:commons-codec:1.13" : "e9962aab,refs=4",
"io.sgr:s2-geometry-library-java:1.0.0" : "cbc357ab,refs=4",
"junit:junit:4.13.1" : "fa9ef26b,refs=4",
"net.sf.jopt-simple:jopt-simple:5.0.4" : "85a1e4c6,refs=2",
"net.sourceforge.nekohtml:nekohtml:1.9.17" : "5ce8cdc6,refs=2",
"org.antlr:antlr4-runtime:4.11.1" : "d9953130,refs=4",
"org.apache.commons:commons-compress:1.19" : "5ce8cdc6,refs=2",
"org.apache.commons:commons-math3:3.6.1" : "85a1e4c6,refs=2",
"org.apache.opennlp:opennlp-tools:2.3.2" : "2f760bab,refs=4",
"org.carrot2:morfologik-fsa:2.1.9" : "79af844b,refs=4",
"org.carrot2:morfologik-polish:2.1.9" : "fe494320,refs=3",
"org.carrot2:morfologik-stemming:2.1.9" : "79af844b,refs=4",
"org.hamcrest:hamcrest:2.2" : "fa9ef26b,refs=4",
"org.locationtech.spatial4j:spatial4j:0.8" : "cbc357ab,refs=4",
"org.openjdk.jmh:jmh-core:1.37" : "85a1e4c6,refs=2",
"org.ow2.asm:asm:9.6" : "d9953130,refs=4",
"org.ow2.asm:asm-commons:9.6" : "d9953130,refs=4",
"org.ow2.asm:asm-tree:9.6" : "d9953130,refs=4",
"org.slf4j:slf4j-api:1.7.36" : "2f760bab,refs=4",
"ua.net.nlp:morfologik-ukrainian-search:4.9.1" : "fe494320,refs=3",
"xerces:xercesImpl:2.12.0" : "5ce8cdc6,refs=2"
},
"test_dependencies" : {
"com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.8.1" : "b35e5d7a,refs=74",
"com.carrotsearch:procfork:1.0.6" : "b7ba1646,refs=2",
"com.github.ben-manes.caffeine:caffeine:3.0.5" : "6897bc09,refs=38",
"com.github.kevinstern:software-and-algorithms:1.0" : "6897bc09,refs=38",
"com.google.auto.service:auto-service-annotations:1.0.1" : "6897bc09,refs=38",
"com.google.auto.value:auto-value-annotations:1.9" : "6897bc09,refs=38",
"com.google.auto:auto-common:1.2.1" : "6897bc09,refs=38",
"com.google.code.findbugs:jsr305:3.0.2" : "6897bc09,refs=38",
"com.google.errorprone:error_prone_annotation:2.18.0" : "6897bc09,refs=38",
"com.google.errorprone:error_prone_annotations:2.18.0" : "6897bc09,refs=38",
"com.google.errorprone:error_prone_check_api:2.18.0" : "6897bc09,refs=38",
"com.google.errorprone:error_prone_core:2.18.0" : "6897bc09,refs=38",
"com.google.errorprone:error_prone_type_annotations:2.18.0" : "6897bc09,refs=38",
"com.google.guava:failureaccess:1.0.1" : "6897bc09,refs=38",
"com.google.guava:guava:31.0.1-jre" : "6897bc09,refs=38",
"com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava" : "6897bc09,refs=38",
"com.google.j2objc:j2objc-annotations:1.3" : "6897bc09,refs=38",
"com.google.protobuf:protobuf-java:3.19.2" : "6897bc09,refs=38",
"com.ibm.icu:icu4j:74.2" : "ffa00415,refs=8",
"commons-codec:commons-codec:1.13" : "733734f0,refs=6",
"io.github.java-diff-utils:java-diff-utils:4.0" : "6897bc09,refs=38",
"io.sgr:s2-geometry-library-java:1.0.0" : "1d5a4b2b,refs=4",
"javax.inject:javax.inject:1" : "6897bc09,refs=38",
"junit:junit:4.13.1" : "b35e5d7a,refs=74",
"net.sf.jopt-simple:jopt-simple:5.0.4" : "152d9f78,refs=3",
"net.sourceforge.nekohtml:nekohtml:1.9.17" : "6f16ff86,refs=2",
"org.antlr:antlr4-runtime:4.11.1" : "6fbc4021,refs=5",
"org.apache.commons:commons-compress:1.19" : "6f16ff86,refs=2",
"org.apache.commons:commons-math3:3.6.1" : "152d9f78,refs=3",
"org.apache.opennlp:opennlp-tools:2.3.2" : "b91715f0,refs=6",
"org.assertj:assertj-core:3.21.0" : "b7ba1646,refs=2",
"org.carrot2:morfologik-fsa:2.1.9" : "e077a675,refs=8",
"org.carrot2:morfologik-polish:2.1.9" : "cb00cecf,refs=5",
"org.carrot2:morfologik-stemming:2.1.9" : "e077a675,refs=8",
"org.checkerframework:checker-qual:3.19.0" : "6897bc09,refs=38",
"org.checkerframework:dataflow-errorprone:3.27.0" : "6897bc09,refs=38",
"org.eclipse.jgit:org.eclipse.jgit:4.4.1.201607150455-r" : "6897bc09,refs=38",
"org.hamcrest:hamcrest:2.2" : "b35e5d7a,refs=74",
"org.locationtech.jts:jts-core:1.17.0" : "180518e6,refs=2",
"org.locationtech.spatial4j:spatial4j:0.8" : "1d5a4b2b,refs=4",
"org.openjdk.jmh:jmh-core:1.37" : "152d9f78,refs=3",
"org.openjdk.jmh:jmh-generator-annprocess:1.37" : "ecaf1d73,refs=1",
"org.ow2.asm:asm:9.6" : "6fbc4021,refs=5",
"org.ow2.asm:asm-commons:9.6" : "6fbc4021,refs=5",
"org.ow2.asm:asm-tree:9.6" : "6fbc4021,refs=5",
"org.pcollections:pcollections:3.1.4" : "6897bc09,refs=38",
"org.slf4j:slf4j-api:1.7.36" : "b91715f0,refs=6",
"ua.net.nlp:morfologik-ukrainian-search:4.9.1" : "cb00cecf,refs=5",
"xerces:xercesImpl:2.12.0" : "6f16ff86,refs=2"
}
},
"because" : {
"152d9f78" : [
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:benchmark-jmh"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:benchmark-jmh"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:benchmark-jmh"
}
],
"180518e6" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:spatial-extras"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:spatial-extras"
}
],
"1d5a4b2b" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:spatial-extras"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:spatial-extras"
}
],
"2f760bab" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:analysis:opennlp"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:analysis:opennlp"
}
],
"47ea4550" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:analysis:icu"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:analysis:icu"
}
],
"5ce8cdc6" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:benchmark"
}
],
"6897bc09" : [
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:backward-codecs"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:benchmark-jmh"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:classification"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:codecs"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:core"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:core.tests"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:demo"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:distribution.tests"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:expressions"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:facet"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:grouping"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:highlighter"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:join"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:memory"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:misc"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:monitor"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:queries"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:queryparser"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:replicator"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:sandbox"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:spatial-extras"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:spatial-test-fixtures"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:spatial3d"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:suggest"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:test-framework"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:common"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:icu"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:kuromoji"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:morfologik.tests"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:nori"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:opennlp"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:phonetic"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:smartcn"
},
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:analysis:stempel"
}
],
"6f16ff86" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:benchmark"
}
],
"6fbc4021" : [
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:benchmark-jmh"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:demo"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:expressions"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:expressions"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:queries"
}
],
"733734f0" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:phonetic"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:phonetic"
}
],
"79af844b" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:analysis:morfologik"
}
],
"85a1e4c6" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:benchmark-jmh"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:benchmark-jmh"
}
],
"b35e5d7a" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:backward-codecs"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:backward-codecs"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:classification"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:classification"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:codecs"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:codecs"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:core"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:core"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:core.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:core.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:demo"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:demo"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:distribution.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:distribution.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:expressions"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:expressions"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:facet"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:facet"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:grouping"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:grouping"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:highlighter"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:highlighter"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:join"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:join"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:memory"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:memory"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:misc"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:misc"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:monitor"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:monitor"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:queries"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:queries"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:queryparser"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:queryparser"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:replicator"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:replicator"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:sandbox"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:sandbox"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:spatial-extras"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:spatial-extras"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:spatial-test-fixtures"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:spatial-test-fixtures"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:spatial3d"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:spatial3d"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:suggest"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:suggest"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:test-framework"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:test-framework"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:common"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:common"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:icu"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:icu"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:kuromoji"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:kuromoji"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:morfologik.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:morfologik.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:nori"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:nori"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:opennlp"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:opennlp"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:phonetic"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:phonetic"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:smartcn"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:smartcn"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:stempel"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:stempel"
}
],
"b7ba1646" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:distribution.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:distribution.tests"
}
],
"b91715f0" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:opennlp"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:opennlp"
}
],
"cb00cecf" : [
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:morfologik.tests"
}
],
"cbc357ab" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:spatial-extras"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:spatial-extras"
}
],
"d9953130" : [
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:benchmark-jmh"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:demo"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:expressions"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:expressions"
}
],
"e077a675" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:morfologik.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:morfologik.tests"
}
],
"e9962aab" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:analysis:phonetic"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:analysis:phonetic"
}
],
"ecaf1d73" : [
{
"configuration" : "annotationProcessor",
"projectPath" : ":lucene:benchmark-jmh"
}
],
"fa9ef26b" : [
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:spatial-test-fixtures"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:spatial-test-fixtures"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:test-framework"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:test-framework"
}
],
"fe494320" : [
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "compileClasspath",
"projectPath" : ":lucene:analysis:morfologik"
},
{
"configuration" : "runtimeClasspath",
"projectPath" : ":lucene:analysis:morfologik"
}
],
"ffa00415" : [
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis.tests"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:benchmark"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:luke"
},
{
"configuration" : "testCompileClasspath",
"projectPath" : ":lucene:analysis:icu"
},
{
"configuration" : "testRuntimeClasspath",
"projectPath" : ":lucene:analysis:icu"
}
]
}
}

View File

@ -1,19 +0,0 @@
com.carrotsearch.randomizedtesting:*=2.8.1
com.carrotsearch:procfork=1.0.6
com.google.errorprone:*=2.18.0
com.ibm.icu:icu4j=74.2
commons-codec:commons-codec=1.13
io.sgr:s2-geometry-library-java=1.0.0
junit:junit=4.13.1
net.sourceforge.nekohtml:nekohtml=1.9.17
org.antlr:antlr4*=4.11.1
org.apache.commons:commons-compress=1.19
org.apache.opennlp:opennlp-tools=2.3.2
org.assertj:*=3.21.0
org.carrot2:morfologik-*=2.1.9
org.hamcrest:*=2.2
org.locationtech.jts:jts-core=1.17.0
org.locationtech.spatial4j:*=0.8
org.ow2.asm:*=9.6
ua.net.nlp:morfologik-ukrainian-search=4.9.1
xerces:xercesImpl=2.12.0

85
versions.toml Normal file
View File

@ -0,0 +1,85 @@
[versions]
antlr = "4.11.1"
asm = "9.6"
assertj = "3.21.0"
commons-codec = "1.13"
commons-compress = "1.19"
ecj = "3.36.0"
errorprone = "2.18.0"
flexmark = "0.61.24"
# @keep This is the google-java-format (GJF) version used by spotless/tidy.
googleJavaFormat = "1.18.1"
groovy = "3.0.21"
hamcrest = "2.2"
icu4j = "74.2"
javacc = "7.0.12"
jflex = "1.8.2"
jgit = "5.13.1.202206130422-r"
jmh = "1.37"
jts = "1.17.0"
junit = "4.13.1"
# @keep Minimum Gradle version required to run the build.
minGradle = "8.8"
# @keep This is the minimum required Java version.
minJava = "21"
morfologik = "2.1.9"
morfologik-ukrainian = "4.9.1"
nekohtml = "1.9.17"
opennlp = "2.3.2"
procfork = "1.0.6"
randomizedtesting = "2.8.1"
rat = "0.14"
s2-geometry = "1.0.0"
spatial4j = "0.8"
xerces = "2.12.0"
zstd = "1.5.5-11"
[libraries]
antlr-core = { module = "org.antlr:antlr4", version.ref = "antlr" }
antlr-runtime = { module = "org.antlr:antlr4-runtime", version.ref = "antlr" }
asm-commons = { module = "org.ow2.asm:asm-commons", version.ref = "asm" }
asm-core = { module = "org.ow2.asm:asm", version.ref = "asm" }
assertj = { module = "org.assertj:assertj-core", version.ref = "assertj" }
commons-codec = { module = "commons-codec:commons-codec", version.ref = "commons-codec" }
commons-compress = { module = "org.apache.commons:commons-compress", version.ref = "commons-compress" }
ecj = { module = "org.eclipse.jdt:ecj", version.ref = "ecj" }
errorprone = { module = "com.google.errorprone:error_prone_core", version.ref = "errorprone" }
flexmark-core = { module = "com.vladsch.flexmark:flexmark", version.ref = "flexmark" }
flexmark-ext-abbreviation = { module = "com.vladsch.flexmark:flexmark-ext-abbreviation", version.ref = "flexmark" }
flexmark-ext-attributes = { module = "com.vladsch.flexmark:flexmark-ext-attributes", version.ref = "flexmark" }
flexmark-ext-autolink = { module = "com.vladsch.flexmark:flexmark-ext-autolink", version.ref = "flexmark" }
flexmark-ext-tables = { module = "com.vladsch.flexmark:flexmark-ext-tables", version.ref = "flexmark" }
groovy = { module = "org.codehaus.groovy:groovy-all", version.ref = "groovy" }
hamcrest = { module = "org.hamcrest:hamcrest", version.ref = "hamcrest" }
icu4j = { module = "com.ibm.icu:icu4j", version.ref = "icu4j" }
javacc = { module = "net.java.dev.javacc:javacc", version.ref = "javacc" }
jflex = { module = "de.jflex:jflex", version.ref = "jflex" }
jgit = { module = "org.eclipse.jgit:org.eclipse.jgit", version.ref = "jgit" }
jmh-annprocess = { module = "org.openjdk.jmh:jmh-generator-annprocess", version.ref = "jmh" }
jmh-core = { module = "org.openjdk.jmh:jmh-core", version.ref = "jmh" }
jts = { module = "org.locationtech.jts:jts-core", version.ref = "jts" }
junit = { module = "junit:junit", version.ref = "junit" }
morfologik-polish = { module = "org.carrot2:morfologik-polish", version.ref = "morfologik" }
morfologik-stemming = { module = "org.carrot2:morfologik-stemming", version.ref = "morfologik" }
morfologik-ukrainian = { module = "ua.net.nlp:morfologik-ukrainian-search", version.ref = "morfologik-ukrainian" }
nekohtml = { module = "net.sourceforge.nekohtml:nekohtml", version.ref = "nekohtml" }
opennlp-tools = { module = "org.apache.opennlp:opennlp-tools", version.ref = "opennlp" }
procfork = { module = "com.carrotsearch:procfork", version.ref = "procfork" }
randomizedtesting-runner = { module = "com.carrotsearch.randomizedtesting:randomizedtesting-runner", version.ref = "randomizedtesting" }
rat = { module = "org.apache.rat:apache-rat", version.ref = "rat" }
s2-geometry = { module = "io.sgr:s2-geometry-library-java", version.ref = "s2-geometry" }
spatial4j = { module = "org.locationtech.spatial4j:spatial4j", version.ref = "spatial4j" }
xerces = { module = "xerces:xercesImpl", version.ref = "xerces" }
zstd = { module = "com.github.luben:zstd-jni", version.ref = "zstd" }
[plugins]
benmanes-versions = "com.github.ben-manes.versions:0.51.0"
dependencychecks = "com.carrotsearch.gradle.dependencychecks:0.0.9"
errorprone = "net.ltgt.errorprone:3.1.0"
forbiddenapis = "de.thetaphi.forbiddenapis:3.7"
jacocolog = "org.barfuin.gradle.jacocolog:3.1.0"
owasp-dependencycheck = "org.owasp.dependencycheck:7.2.0"
randomizedtesting = "com.carrotsearch.gradle.randomizedtesting:0.0.6"
spotless = "com.diffplug.spotless:6.5.2"
undercouch-download = "de.undercouch.download:5.2.0"
versionCatalogUpdate = "nl.littlerobots.version-catalog-update:0.8.4"