mirror of https://github.com/apache/lucene.git
LUCENE-9077: gradle build support.
Merge branch 'gradle-master'
commit fee8ba6e42
@@ -0,0 +1,2 @@
# Ignore all differences in line endings for the lock file.
versions.lock text eol=lf
@@ -30,3 +30,9 @@ __pycache__
/dev-tools/scripts/scripts.iml
.DS_Store

build/
.gradle/
.idea/

# Ignore the generated local settings file.
gradle.properties
README.md
@@ -40,6 +40,8 @@ comprehensive documentation, visit:

(You do not need to do this if you downloaded a pre-built package)

### Building with Ant

Lucene and Solr are built using [Apache Ant](http://ant.apache.org/). To build
Lucene and Solr, run:
@@ -58,6 +60,29 @@ following command from the `solr/` directory:

`ant server`

### Building with Gradle

There is ongoing work (see [LUCENE-9077](https://issues.apache.org/jira/browse/LUCENE-9077))
to switch from the legacy Ant-based build system to [gradle](https://gradle.org/).
Please give it a try!

At the time of writing, the gradle build requires exactly Java 11
(it may or may not work with newer Java versions).

To build Lucene and Solr, run (`./` can be omitted on Windows):

`./gradlew assemble`

The command above also packages a full distribution of the Solr server; the
package can be located at:

`solr/packaging/build/solr-*`

Note that the gradle build does not create or copy binaries throughout the
source repository (as the Ant build does), so you need to switch to the
packaging output folder above; the rest of the instructions below remain
identical.

## Running Solr

After [building Solr](#building-lucene-solr), the server can be started using
@@ -89,6 +114,12 @@ import Lucene/Solr.

- *IntelliJ* - `ant idea` (See [this](https://cwiki.apache.org/confluence/display/lucene/HowtoConfigureIntelliJ) for details)
- *Netbeans* - `ant netbeans` (See [this](https://cwiki.apache.org/confluence/display/lucene/HowtoConfigureNetbeans) for details)

### Gradle build and IDE support

- *IntelliJ* - IntelliJ IDEA can import the project out of the box.
  Code formatting conventions should be adjusted manually.
- *Eclipse* - Not tested.
- *Netbeans* - Not tested.

## Running Tests
@@ -101,6 +132,13 @@ ways. For an exhaustive discussion of the options available, run:

`ant test-help`

### Gradle build and tests

Run the following command to display extensive help for running
tests with gradle:

`./gradlew helpTests`

## Contributing

Please review the [Contributing to Solr
@@ -0,0 +1,80 @@
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter

plugins {
  id "base"
  id "com.palantir.consistent-versions" version "1.14.0"
  id 'de.thetaphi.forbiddenapis' version '2.7' apply false
}

// Project version and main properties. Applies to all projects.
allprojects {
  version = "9.0.0-SNAPSHOT"
}

ext {
  def tstamp = ZonedDateTime.now()
  buildDate = DateTimeFormatter.ofPattern("yyyy-MM-dd").format(tstamp)
  buildTime = DateTimeFormatter.ofPattern("HH:mm:ss").format(tstamp)
  buildYear = DateTimeFormatter.ofPattern("yyyy").format(tstamp)

  // Workaround for this one, for now:
  // https://github.com/palantir/gradle-consistent-versions/issues/383
  scriptDepVersions = [
      "apache-rat": "0.11"
  ]
}

// Include smaller chunks configuring dedicated build areas.
// Some of these intersect or add additional functionality.
// The order of inclusion of these files shouldn't matter (but may
// if the build file is incorrectly written and evaluates something
// eagerly).

apply from: file('gradle/generate-defaults.gradle')

// Set up defaults and configure aspects for certain modules or functionality
// (java, tests)
apply from: file('gradle/defaults.gradle')
apply from: file('gradle/defaults-java.gradle')
apply from: file('gradle/defaults-javadoc.gradle')
apply from: file('gradle/testing/defaults-tests.gradle')
apply from: file('gradle/testing/randomization.gradle')
apply from: file('gradle/testing/fail-on-no-tests.gradle')
apply from: file('gradle/testing/runtime-jvm-support.gradle')

// Maven publishing.
apply from: file('gradle/maven/defaults-maven.gradle')

// IDE settings and specials.
apply from: file('gradle/defaults-idea.gradle')

// Validation tasks
apply from: file('gradle/validation/precommit.gradle')
apply from: file('gradle/validation/forbidden-apis.gradle')
apply from: file('gradle/validation/jar-checks.gradle')
apply from: file('gradle/validation/git-status.gradle')
apply from: file('gradle/validation/versions-props-sorted.gradle')
apply from: file('gradle/validation/validate-source-patterns.gradle')
apply from: file('gradle/validation/config-file-sanity.gradle')
apply from: file('gradle/validation/rat-sources.gradle')

// Additional development aids.
apply from: file('gradle/maven/maven-local.gradle')
apply from: file('gradle/testing/per-project-summary.gradle')
apply from: file('gradle/testing/slowest-tests-at-end.gradle')
apply from: file('gradle/testing/failed-tests-at-end.gradle')
apply from: file('gradle/help.gradle')

// Ant-compatibility layer. ALL OF THESE SHOULD BE GONE at some point. They are
// here so that we can coexist with current ant build but they are indicative
// of potential problems with the build conventions, dependencies, etc.
apply from: file('gradle/ant-compat/force-versions.gradle')
apply from: file('gradle/ant-compat/folder-layout.gradle')
apply from: file('gradle/ant-compat/misc.gradle')
apply from: file('gradle/ant-compat/resolve.gradle')
apply from: file('gradle/ant-compat/post-jar.gradle')
apply from: file('gradle/ant-compat/test-classes-cross-deps.gradle')
apply from: file('gradle/ant-compat/artifact-naming.gradle')
apply from: file('gradle/ant-compat/solr-forbidden-apis.gradle')
apply from: file('gradle/ant-compat/forbidden-api-rules-in-sync.gradle')
@@ -0,0 +1,8 @@

// Make sure the build environment is consistent.
apply from: file('../gradle/validation/check-environment.gradle')

dependencies {
  implementation gradleApi()
  implementation localGroovy()
}
@@ -0,0 +1,275 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.gradle;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;

import org.gradle.api.internal.tasks.testing.logging.FullExceptionFormatter;
import org.gradle.api.internal.tasks.testing.logging.TestExceptionFormatter;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.testing.TestDescriptor;
import org.gradle.api.tasks.testing.TestListener;
import org.gradle.api.tasks.testing.TestOutputEvent;
import org.gradle.api.tasks.testing.TestOutputListener;
import org.gradle.api.tasks.testing.TestResult;
import org.gradle.api.tasks.testing.logging.TestLogging;

/**
 * An error reporting listener that queues test output streams and displays them
 * on failure.
 * <p>
 * Heavily inspired by Elasticsearch's ErrorReportingTestListener (ASL 2.0 licensed).
 */
public class ErrorReportingTestListener implements TestOutputListener, TestListener {
  private static final Logger LOGGER = Logging.getLogger(ErrorReportingTestListener.class);

  private final TestExceptionFormatter formatter;
  private final Map<TestKey, OutputHandler> outputHandlers = new ConcurrentHashMap<>();
  private final Path spillDir;
  private final Path outputsDir;
  private final boolean verboseMode;

  public ErrorReportingTestListener(TestLogging testLogging, Path spillDir, Path outputsDir, boolean verboseMode) {
    this.formatter = new FullExceptionFormatter(testLogging);
    this.spillDir = spillDir;
    this.outputsDir = outputsDir;
    this.verboseMode = verboseMode;
  }

  @Override
  public void onOutput(TestDescriptor testDescriptor, TestOutputEvent outputEvent) {
    handlerFor(testDescriptor).write(outputEvent);
  }

  @Override
  public void beforeSuite(TestDescriptor suite) {
    // noop.
  }

  @Override
  public void beforeTest(TestDescriptor testDescriptor) {
    // Noop.
  }

  @Override
  public void afterSuite(final TestDescriptor suite, TestResult result) {
    if (suite.getParent() == null || suite.getName().startsWith("Gradle")) {
      return;
    }

    TestKey key = TestKey.of(suite);
    try {
      OutputHandler outputHandler = outputHandlers.get(key);
      if (outputHandler != null) {
        long length = outputHandler.length();
        if (length > 1024 * 1024 * 10) {
          LOGGER.warn(String.format(Locale.ROOT, "WARNING: Test %s wrote %,d bytes of output.",
              suite.getName(),
              length));
        }
      }

      boolean echoOutput = Objects.equals(result.getResultType(), TestResult.ResultType.FAILURE);
      boolean dumpOutput = echoOutput;

      // If the test suite failed, report output.
      if (dumpOutput || echoOutput) {
        Files.createDirectories(outputsDir);
        Path outputLog = outputsDir.resolve(getOutputLogName(suite));

        // Save the output of a failing test to disk.
        try (Writer w = Files.newBufferedWriter(outputLog, StandardCharsets.UTF_8)) {
          if (outputHandler != null) {
            outputHandler.copyTo(w);
          }
        }

        if (echoOutput && !verboseMode) {
          synchronized (this) {
            System.out.println("");
            System.out.println(suite.getClassName() + " > test suite's output saved to " + outputLog + ", copied below:");
            try (BufferedReader reader = Files.newBufferedReader(outputLog, StandardCharsets.UTF_8)) {
              char[] buf = new char[1024];
              int len;
              while ((len = reader.read(buf)) >= 0) {
                System.out.print(new String(buf, 0, len));
              }
              System.out.println();
            }
          }
        }
      }
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    } finally {
      OutputHandler handler = outputHandlers.remove(key);
      if (handler != null) {
        try {
          handler.close();
        } catch (IOException e) {
          LOGGER.error("Failed to close output handler for: " + key, e);
        }
      }
    }
  }

  private static Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+");

  public static String getOutputLogName(TestDescriptor suite) {
    return SANITIZE.matcher("OUTPUT-" + suite.getName() + ".txt").replaceAll("_");
  }

  @Override
  public void afterTest(TestDescriptor testDescriptor, TestResult result) {
    // Include test failure exception stacktrace(s) in test output log.
    if (result.getResultType() == TestResult.ResultType.FAILURE) {
      if (result.getExceptions().size() > 0) {
        String message = formatter.format(testDescriptor, result.getExceptions());
        handlerFor(testDescriptor).write(message);
      }
    }
  }

  private OutputHandler handlerFor(TestDescriptor descriptor) {
    // Attach output of leaves (individual tests) to their parent.
    if (!descriptor.isComposite()) {
      descriptor = descriptor.getParent();
    }
    return outputHandlers.computeIfAbsent(TestKey.of(descriptor), (key) -> new OutputHandler());
  }

  public static class TestKey {
    private final String key;

    private TestKey(String key) {
      this.key = key;
    }

    public static TestKey of(TestDescriptor d) {
      StringBuilder key = new StringBuilder();
      key.append(d.getClassName());
      key.append("::");
      key.append(d.getName());
      key.append("::");
      key.append(d.getParent() == null ? "-" : d.getParent().toString());
      return new TestKey(key.toString());
    }

    @Override
    public boolean equals(Object o) {
      return o != null &&
          o.getClass() == this.getClass() &&
          Objects.equals(((TestKey) o).key, key);
    }

    @Override
    public int hashCode() {
      return key.hashCode();
    }

    @Override
    public String toString() {
      return key;
    }
  }

  private class OutputHandler implements Closeable {
    // Max single-line buffer before automatic wrap occurs.
    private static final int MAX_LINE_WIDTH = 1024 * 4;

    private final SpillWriter buffer;

    // internal stream.
    private final PrefixedWriter sint;
    // stdout
    private final PrefixedWriter sout;
    // stderr
    private final PrefixedWriter serr;

    // last used stream (so that we can flush it properly and prefixes are not screwed up).
    private PrefixedWriter last;

    public OutputHandler() {
      buffer = new SpillWriter(() -> {
        try {
          return Files.createTempFile(spillDir, "spill-", ".tmp");
        } catch (IOException e) {
          throw new UncheckedIOException(e);
        }
      });

      Writer sink = buffer;
      if (verboseMode) {
        sink = new StdOutTeeWriter(buffer);
      }

      sint = new PrefixedWriter(" > ", sink, MAX_LINE_WIDTH);
      sout = new PrefixedWriter(" 1> ", sink, MAX_LINE_WIDTH);
      serr = new PrefixedWriter(" 2> ", sink, MAX_LINE_WIDTH);
      last = sint;
    }

    public void write(TestOutputEvent event) {
      write((event.getDestination() == TestOutputEvent.Destination.StdOut ? sout : serr), event.getMessage());
    }

    public void write(String message) {
      write(sint, message);
    }

    public long length() throws IOException {
      return buffer.length();
    }

    private void write(PrefixedWriter out, String message) {
      try {
        if (out != last) {
          last.completeLine();
          last = out;
        }
        out.write(message);
      } catch (IOException e) {
        throw new UncheckedIOException("Unable to write to test output.", e);
      }
    }

    public void copyTo(Writer out) throws IOException {
      flush();
      buffer.copyTo(out);
    }

    public void flush() throws IOException {
      sout.completeLine();
      serr.completeLine();
      buffer.flush();
    }

    @Override
    public void close() throws IOException {
      buffer.close();
    }
  }
}
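For reference, the one non-obvious naming rule in the listener is how a suite name becomes a per-suite output file. The following is a tiny, stand-alone Java sketch that re-implements the same sanitization rule purely for illustration; the demo class and sample suite names are not part of the commit.

import java.util.regex.Pattern;

public class OutputLogNameDemo {
  // Same character class the listener uses: anything outside it collapses to '_'.
  private static final Pattern SANITIZE = Pattern.compile("[^a-zA-Z .\\-_0-9]+");

  static String outputLogName(String suiteName) {
    return SANITIZE.matcher("OUTPUT-" + suiteName + ".txt").replaceAll("_");
  }

  public static void main(String[] args) {
    // Dots, dashes and digits pass through untouched:
    // OUTPUT-org.apache.lucene.index.TestIndexWriter.txt
    System.out.println(outputLogName("org.apache.lucene.index.TestIndexWriter"));
    // Characters such as '$' are replaced: OUTPUT-Suite_Inner.txt
    System.out.println(outputLogName("Suite$Inner"));
  }
}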
@@ -0,0 +1,81 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.gradle;

import java.io.IOException;
import java.io.Writer;

/**
 * Prefixes every new line with a given string, synchronizing multiple streams to emit consistent lines.
 */
public class PrefixedWriter extends Writer {
  Writer sink;

  private final static char LF = '\n';
  private final String prefix;
  private final StringBuilder lineBuffer = new StringBuilder();
  private final int maxLineLength;

  public PrefixedWriter(String prefix, Writer sink, int maxLineLength) {
    super(sink);
    this.sink = sink;
    this.prefix = prefix;
    this.maxLineLength = maxLineLength;
  }

  @Override
  public void write(int c) throws IOException {
    if (lineBuffer.length() == maxLineLength || c == LF) {
      sink.write(prefix);
      sink.write(lineBuffer.toString());
      sink.write(LF);

      lineBuffer.setLength(0);
      if (c != LF) {
        lineBuffer.append((char) c);
      }
    } else {
      lineBuffer.append((char) c);
    }
  }

  @Override
  public void write(char[] cbuf, int off, int len) throws IOException {
    for (int i = off; i < off + len; i++) {
      write(cbuf[i]);
    }
  }

  @Override
  public void flush() throws IOException {
    // don't pass flushes.
  }

  @Override
  public void close() throws IOException {
    throw new UnsupportedOperationException();
  }

  /**
   * Complete the current line (emit LF if not at the start of the line already).
   */
  public void completeLine() throws IOException {
    if (lineBuffer.length() > 0) {
      write(LF);
    }
  }
}
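A short, hypothetical usage sketch of the writer above (the demo class is not part of the commit): every completed line comes out with the given prefix, and a pending partial line can be forced out with completeLine().

import java.io.IOException;
import java.io.StringWriter;

import org.apache.lucene.gradle.PrefixedWriter;

public class PrefixedWriterDemo {
  public static void main(String[] args) throws IOException {
    StringWriter sink = new StringWriter();
    PrefixedWriter out = new PrefixedWriter("  1> ", sink, 80);

    out.write("first line\n");
    out.write("second line, no trailing newline yet");
    out.completeLine(); // emits the pending partial line with the prefix

    // Prints:
    //   1> first line
    //   1> second line, no trailing newline yet
    System.out.print(sink);
  }
}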
@@ -0,0 +1,131 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.gradle;

import java.io.IOException;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Supplier;

public class SpillWriter extends Writer {
  private final static int MAX_BUFFERED = 2 * 1024;
  private final StringWriter buffer = new StringWriter(MAX_BUFFERED);

  private final Supplier<Path> spillPathSupplier;
  private Writer spill;
  private Path spillPath;

  public SpillWriter(Supplier<Path> spillPathSupplier) {
    this.spillPathSupplier = spillPathSupplier;
  }

  @Override
  public void write(char[] cbuf, int off, int len) throws IOException {
    getSink(len).write(cbuf, off, len);
  }

  @Override
  public void write(int c) throws IOException {
    getSink(1).write(c);
  }

  @Override
  public void write(char[] cbuf) throws IOException {
    getSink(cbuf.length).write(cbuf);
  }

  @Override
  public void write(String str) throws IOException {
    getSink(str.length()).write(str);
  }

  @Override
  public void write(String str, int off, int len) throws IOException {
    getSink(len).write(str, off, len);
  }

  @Override
  public Writer append(CharSequence csq) throws IOException {
    getSink(csq.length()).append(csq);
    return this;
  }

  @Override
  public Writer append(CharSequence csq, int start, int end) throws IOException {
    getSink(Math.max(0, end - start)).append(csq, start, end);
    return this;
  }

  @Override
  public Writer append(char c) throws IOException {
    getSink(1).append(c);
    return this;
  }

  private Writer getSink(int expectedWriteChars) throws IOException {
    if (spill == null) {
      if (buffer.getBuffer().length() + expectedWriteChars <= MAX_BUFFERED) {
        return buffer;
      }

      spillPath = spillPathSupplier.get();
      spill = Files.newBufferedWriter(spillPath, StandardCharsets.UTF_8);
      spill.append(buffer.getBuffer());
      buffer.getBuffer().setLength(0);
    }

    return spill;
  }

  @Override
  public void flush() throws IOException {
    getSink(0).flush();
  }

  @Override
  public void close() throws IOException {
    buffer.close();
    if (spill != null) {
      spill.close();
      Files.delete(spillPath);
    }
  }

  public void copyTo(Writer writer) throws IOException {
    if (spill != null) {
      flush();
      try (Reader reader = Files.newBufferedReader(spillPath, StandardCharsets.UTF_8)) {
        reader.transferTo(writer);
      }
    } else {
      writer.append(buffer.getBuffer());
    }
  }

  public long length() throws IOException {
    flush();
    if (spill != null) {
      return Files.size(spillPath);
    } else {
      return buffer.getBuffer().length();
    }
  }
}
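A hypothetical usage sketch for SpillWriter (the demo class is not part of the commit): output stays in the in-memory buffer until it would exceed roughly 2 KB, at which point everything spills to a temporary file supplied by the callback, and close() deletes that file.

import java.io.IOException;
import java.io.StringWriter;
import java.io.UncheckedIOException;
import java.nio.file.Files;

import org.apache.lucene.gradle.SpillWriter;

public class SpillWriterDemo {
  public static void main(String[] args) throws IOException {
    SpillWriter writer = new SpillWriter(() -> {
      try {
        return Files.createTempFile("spill-", ".tmp");
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
    });

    // Write more than the 2 KB in-memory threshold to force a spill to disk.
    for (int i = 0; i < 100; i++) {
      writer.write("line " + i + ": some test output that accumulates over time\n");
    }

    StringWriter copy = new StringWriter();
    writer.copyTo(copy);   // reads the content back from the spill file
    System.out.println("captured length: " + writer.length());
    writer.close();        // removes the temporary spill file
  }
}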
@@ -0,0 +1,93 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.gradle;

import java.io.IOException;
import java.io.PrintStream;
import java.io.Writer;

class StdOutTeeWriter extends Writer {
  private final Writer delegate;
  private final PrintStream out = System.out;

  public StdOutTeeWriter(Writer delegate) {
    this.delegate = delegate;
  }

  @Override
  public void write(int c) throws IOException {
    delegate.write(c);
    out.write(c);
  }

  @Override
  public void write(char[] cbuf) throws IOException {
    delegate.write(cbuf);
    out.print(cbuf);
  }

  @Override
  public void write(String str) throws IOException {
    delegate.write(str);
    out.print(str);
  }

  @Override
  public void write(String str, int off, int len) throws IOException {
    delegate.write(str, off, len);
    out.append(str, off, len);
  }

  @Override
  public Writer append(CharSequence csq) throws IOException {
    delegate.append(csq);
    out.append(csq);
    return this;
  }

  @Override
  public Writer append(CharSequence csq, int start, int end) throws IOException {
    delegate.append(csq, start, end);
    out.append(csq, start, end);
    return this;
  }

  @Override
  public Writer append(char c) throws IOException {
    delegate.append(c);
    out.append(c);
    return this;
  }

  @Override
  public void write(char[] cbuf, int off, int len) throws IOException {
    delegate.write(cbuf, off, len);
    out.print(new String(cbuf, off, len));
  }

  @Override
  public void flush() throws IOException {
    delegate.flush();
    out.flush();
  }

  @Override
  public void close() throws IOException {
    delegate.close();
    // Don't close the actual output.
  }
}
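And a minimal sketch of the tee writer's intent (not part of the commit): everything written goes to the delegate and is echoed to stdout, which is what the verbose test mode above relies on. The class is package-private, so this illustrative demo declares the same package.

package org.apache.lucene.gradle;

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

public class StdOutTeeWriterDemo {
  public static void main(String[] args) throws IOException {
    StringWriter capture = new StringWriter();
    Writer tee = new StdOutTeeWriter(capture);

    tee.write("this line goes to the buffer AND to the console\n");
    tee.flush();

    // The delegate saw the same characters that were echoed to System.out.
    System.out.println("captured: " + capture);
  }
}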
@@ -0,0 +1,15 @@
// Stick to previous artifact names (not identical to path/ folders).
configure(subprojects.findAll { it.path.contains(':solr:contrib:') }) {
  project.archivesBaseName = project.archivesBaseName.replace("-contrib-", "-")
}

// This project has a different artifact name (solr-contrib-cell). Don't know why.
configure(project(":solr:contrib:extraction")) {
  archivesBaseName = "solr-cell"
}

configure(subprojects.findAll { it.path.contains(':lucene:analysis:') }) {
  project.archivesBaseName = project.archivesBaseName.replace("-analysis-", "-analyzers-")
}
@@ -0,0 +1,26 @@
// Adapt to custom folder convention.
allprojects {
  plugins.withType(JavaPlugin) {
    sourceSets {
      main.java.srcDirs = ['src/java']
      main.resources.srcDirs = ['src/resources']
      test.java.srcDirs = ['src/test']
      test.resources.srcDirs = ['src/test-files']
    }

    task copyTestResources(type: Copy) {
      from('src/test') {
        exclude '**/*.java'
      }
      into sourceSets.test.java.outputDir
    }
    processTestResources.dependsOn copyTestResources
  }
}

// Adapt to custom 'web' folder location.
configure(project(":solr:webapp")) {
  plugins.withType(WarPlugin) {
    webAppDirName = "web"
  }
}
@@ -0,0 +1,37 @@

// Just make sure the forbidden API rules are in sync between gradle and ant versions until
// we get rid of ant build.

def linesOf(FileTree ftree) {
  return ftree.collectMany { path ->
    path.readLines("UTF-8")
        .collect { line -> line.trim() }
        .findAll { line -> !line.startsWith("#") }
        .unique()
        .collect { line -> [path: path, line: line] }
  }.groupBy { e -> e.line }
}

configure(rootProject) {
  task verifyForbiddenApiRulesInSync() {
    doFirst {
      // Read all rules line by line from ant, gradle, remove comments, uniq.
      // Rule sets should be identical.
      def gradleRules = linesOf(fileTree("gradle/validation/forbidden-apis", { include "**/*.txt" }))
      def antRules = linesOf(project(":lucene").fileTree("tools/forbiddenApis", { include "**/*.txt" }))

      def antOnlyLines = antRules.keySet() - gradleRules.keySet()
      def gradleOnlyLines = gradleRules.keySet() - antRules.keySet()

      if (!gradleOnlyLines.isEmpty() || !antOnlyLines.isEmpty()) {
        project.logger.log(LogLevel.ERROR, "The following rules don't have counterparts:\n" +
            (gradleRules.findAll { gradleOnlyLines.contains(it.key) } + antRules.findAll { antOnlyLines.contains(it.key)})
                .collectMany { it.value }
                .join("\n"))
        throw new GradleException("Forbidden APIs rules out of sync.")
      }
    }
  }

  check.dependsOn verifyForbiddenApiRulesInSync
}
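The comparison above is plain set arithmetic on the unique, comment-stripped lines of both rule trees. Here is a small, self-contained Java sketch of the same idea; the directory arguments are hypothetical placeholders, and this sketch additionally skips empty lines, which the Groovy version does not bother to do.

import java.io.*;
import java.nio.file.*;
import java.util.*;
import java.util.stream.*;

public class RuleSyncSketch {
  // Collect trimmed, non-comment, unique lines from all *.txt files under a directory.
  static Set<String> rulesUnder(Path dir) throws IOException {
    try (Stream<Path> files = Files.walk(dir)) {
      return files.filter(p -> p.toString().endsWith(".txt"))
          .flatMap(p -> {
            try {
              return Files.readAllLines(p).stream();
            } catch (IOException e) {
              throw new UncheckedIOException(e);
            }
          })
          .map(String::trim)
          .filter(line -> !line.isEmpty() && !line.startsWith("#"))
          .collect(Collectors.toSet());
    }
  }

  public static void main(String[] args) throws IOException {
    // Hypothetical locations; the real build points at gradle/validation/forbidden-apis
    // and lucene/tools/forbiddenApis.
    Set<String> gradleRules = rulesUnder(Paths.get(args[0]));
    Set<String> antRules = rulesUnder(Paths.get(args[1]));

    Set<String> gradleOnly = new TreeSet<>(gradleRules);
    gradleOnly.removeAll(antRules);
    Set<String> antOnly = new TreeSet<>(antRules);
    antOnly.removeAll(gradleRules);

    if (!gradleOnly.isEmpty() || !antOnly.isEmpty()) {
      System.err.println("Rules without counterparts:\n  gradle-only: " + gradleOnly + "\n  ant-only: " + antOnly);
      System.exit(1);
    }
  }
}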
@@ -0,0 +1,20 @@

// Force versions of certain components to align them with ant build.

subprojects {
  plugins.withType(JavaPlugin) {
    dependencies {
      implementation enforcedPlatform('org.slf4j:slf4j-api:1.7.24')
      implementation enforcedPlatform('commons-logging:commons-logging:1.1.3')
    }
  }
}

configure(project(":solr:server")) {
  afterEvaluate {
    dependencies {
      libExt enforcedPlatform('org.slf4j:slf4j-api:1.7.24')
      libExt enforcedPlatform('commons-logging:commons-logging:1.1.3')
    }
  }
}
@@ -0,0 +1,9 @@

// This file is not included but is kept in ant-compat so that cleanup can be done later.

// Remove special handling of dependency checksum validation/collection for Solr where
// transitive Lucene dependencies are sucked into the licenses/ folder. We can just copy
// Lucene licenses as a whole (they're joint projects after all).
//
// the hack is in 'jar-checks.gradle' under:
// def isSolr = project.path.startsWith(":solr")
@@ -0,0 +1,30 @@

// Exclude inner classes from testing.
allprojects {
  tasks.withType(Test) { task ->
    exclude '**/*$*'
  }
}

// Exclude test classes that are not actually stand-alone tests (they're executed from other stuff).
configure(project(":lucene:replicator")) {
  plugins.withType(JavaPlugin) {
    test {
      exclude "**/SimpleServer*"
    }
  }
}

// Resources from top-level project folder are looked up via getClass(). Strange.
configure(project(":lucene:benchmark")) {
  plugins.withType(JavaPlugin) {
    task syncConf(type: Sync) {
      from('conf')
      into file("${sourceSets.test.java.outputDir}/conf")
    }
    processTestResources.dependsOn syncConf
  }
}
@@ -0,0 +1,34 @@
// This adds a configuration and artifact to solr-core which exports the "post.jar" tool.
// This should be a separate project instead (it is self-contained and its classes are reused
// in many places).

configure(project(":solr:core")) {
  plugins.withType(JavaPlugin) {
    configurations {
      postJar
    }

    task assemblePostJar(type: Jar) {
      dependsOn classes

      archiveFileName = "post.jar"
      destinationDirectory = file("${buildDir}/postJar")

      from(sourceSets.main.output, {
        include "org/apache/solr/util/CLIO.class"
        include "org/apache/solr/util/SimplePostTool*.class"
        include "org/apache/solr/util/RTimer*.class"
      })

      manifest {
        attributes("Main-Class": "org.apache.solr.util.SimplePostTool")
      }
    }

    artifacts {
      postJar assemblePostJar
    }

    assemble.dependsOn assemblePostJar
  }
}
@@ -0,0 +1,210 @@

// For Lucene, a 'resolve' task that copies any (non-project) dependencies
// under the lib/ folder.
configure(allprojects.findAll { project -> project.path.startsWith(":lucene") }) {
  plugins.withType(JavaPlugin) {
    configurations {
      runtimeLibs {
        extendsFrom runtimeElements
        extendsFrom testRuntimeClasspath
      }
    }

    task resolve(type: Sync) {
      from({
        return configurations.runtimeLibs.copyRecursive { dep ->
          !(dep instanceof org.gradle.api.artifacts.ProjectDependency)
        }
      })

      into 'lib'
    }
  }
}

// For Solr, a 'resolve' task is much more complex. There are three folders:
// lib/
// test-lib/
// lucene-libs/
//
// There doesn't seem to be one ideal set of rules on how these should be created, but
// I tried to imitate the current (master) logic present in ivy and ant files in this way:
//
// The "solr platform" set of dependencies is a union of all deps for (core, solrj, server).
//
// Then:
// lib/         - the module's "own" dependencies (excluding Lucene's) that are not present
//                in the solr platform.
// lucene-libs/ - lucene modules declared as the module's dependencies and not present
//                in the solr platform.
// test-lib/    - libs not present in the solr platform and not included in solr:test-framework.
//
// None of these are really needed with gradle... they should be collected just in the distribution
// package, not at each project's level.
//
// Unfortunately this "resolution" process is also related to how the final Solr packaging is assembled.
// I don't know how to untie these two cleanly.
//

configure(allprojects.findAll { project -> project.path.startsWith(":solr:contrib") }) {
  plugins.withType(JavaPlugin) {
    ext {
      packagingDir = file("${buildDir}/packaging")
      deps = file("${packagingDir}/${project.name}")
    }

    configurations {
      solrPlatformLibs
      solrTestPlatformLibs
      runtimeLibs {
        extendsFrom runtimeElements
      }
      packaging
    }

    dependencies {
      solrPlatformLibs project(":solr:core")
      solrPlatformLibs project(":solr:solrj")
      solrPlatformLibs project(":solr:server")

      solrTestPlatformLibs project(":solr:test-framework")
    }

    // An aggregate that configures lib, lucene-libs and test-lib in a temporary location.
    task assemblePackaging(type: Sync) {
      from "README.txt"

      from({
        def externalLibs = configurations.runtimeLibs.copyRecursive { dep ->
          if (dep instanceof org.gradle.api.artifacts.ProjectDependency) {
            return !dep.dependencyProject.path.startsWith(":solr")
          } else {
            return true
          }
        }
        return externalLibs - configurations.solrPlatformLibs
      }, {
        exclude "lucene-*"
        into "lib"
      })

      from({
        def projectLibs = configurations.runtimeLibs.copyRecursive { dep ->
          (dep instanceof org.gradle.api.artifacts.ProjectDependency)
        }
        return projectLibs - configurations.solrPlatformLibs
      }, {
        include "lucene-*"
        into "lucene-libs"
      })

      into deps
    }

    task syncLib(type: Sync) {
      dependsOn assemblePackaging

      from(file("${deps}/lib"), {
        include "**"
      })
      into file("${projectDir}/lib")
    }

    task syncTestLib(type: Sync) {
      // From the test runtime classpath exclude:
      // 1) project dependencies (and their dependencies)
      // 2) runtime dependencies
      // What remains is this module's "own" test dependency.
      from({
        def testRuntimeLibs = configurations.testRuntimeClasspath.copyRecursive { dep ->
          !(dep instanceof org.gradle.api.artifacts.ProjectDependency)
        }

        return testRuntimeLibs - configurations.runtimeLibs - configurations.solrTestPlatformLibs
      })

      into file("${projectDir}/test-lib")
    }

    task resolve() {
      dependsOn syncLib, syncTestLib
    }

    // Contrib packaging currently depends on internal resolve.
    artifacts {
      packaging packagingDir, {
        builtBy assemblePackaging
      }
    }
  }
}

configure(project(":solr:example")) {
  evaluationDependsOn(":solr:example") // explicitly wait for other configs to be applied

  task resolve(type: Copy) {
    from(configurations.postJar, {
      into "exampledocs/"
    })

    from(configurations.dih, {
      into "example-DIH/solr/db/lib"
    })

    into projectDir
  }
}

configure(project(":solr:server")) {
  evaluationDependsOn(":solr:server")

  task resolve(type: Copy) {
    dependsOn assemblePackaging

    from({ packagingDir }, {
      include "**/*.jar"
      include "solr-webapp/webapp/**"
      includeEmptyDirs false
    })

    into projectDir
  }
}

configure(project(":solr:core")) {
  evaluationDependsOn(":solr:core")

  configurations {
    runtimeLibs {
      extendsFrom runtimeElements
    }
  }

  task resolve(type: Sync) {
    from({
      def ownDeps = configurations.runtimeLibs.copyRecursive { dep ->
        if (dep instanceof org.gradle.api.artifacts.ProjectDependency) {
          return !dep.dependencyProject.path.startsWith(":solr")
        } else {
          return true
        }
      }
      return ownDeps
    }, {
      exclude "lucene-*"
    })

    into "lib"
  }
}

configure(project(":solr:solrj")) {
  evaluationDependsOn(":solr:solrj")

  task resolve(type: Sync) {
    from({ configurations.runtimeClasspath }, {
    })

    into "lib"
  }
}
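To make the folder rules described in the comment above concrete, here is a small, self-contained Java sketch of the same set arithmetic. It uses plain strings in place of resolved artifacts; all dependency names below are made-up examples, not actual Solr dependencies.

import java.util.*;

public class SolrLibLayoutSketch {
  static Set<String> minus(Set<String> a, Set<String> b) {
    Set<String> result = new LinkedHashSet<>(a);
    result.removeAll(b);
    return result;
  }

  public static void main(String[] args) {
    // "Solr platform": union of dependencies of core, solrj and server (example values).
    Set<String> solrPlatform = new LinkedHashSet<>(List.of("slf4j-api", "httpclient", "lucene-core"));

    // A contrib module's resolved runtime dependencies (example values).
    Set<String> moduleRuntime = new LinkedHashSet<>(
        List.of("slf4j-api", "tika-core", "lucene-core", "lucene-analyzers-common"));

    Set<String> notInPlatform = minus(moduleRuntime, solrPlatform);

    // lib/: the module's own non-Lucene dependencies that the platform does not already provide.
    Set<String> lib = new LinkedHashSet<>();
    // lucene-libs/: Lucene modules this module depends on that the platform does not provide.
    Set<String> luceneLibs = new LinkedHashSet<>();
    for (String dep : notInPlatform) {
      if (dep.startsWith("lucene-")) {
        luceneLibs.add(dep);
      } else {
        lib.add(dep);
      }
    }

    System.out.println("lib/         = " + lib);        // [tika-core]
    System.out.println("lucene-libs/ = " + luceneLibs); // [lucene-analyzers-common]
  }
}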
@@ -0,0 +1,9 @@

// Why does solr exclude these from forbidden API checks?

configure(project(":solr:core")) {
  configure([forbiddenApisMain, forbiddenApisTest]) {
    exclude "org/apache/solr/internal/**"
    exclude "org/apache/hadoop/**"
  }
}
@@ -0,0 +1,53 @@
// Set up cross-project dependency on test classes. This should be resolved by pulling reused classes into
// a separate regular module. Exporting test classes is sort of weird.
configure([project(":lucene:spatial3d"),
           project(":lucene:analysis:common"),
           project(":lucene:backward-codecs"),
           project(":lucene:queryparser"),
           project(":solr:contrib:dataimporthandler")]) {
  plugins.withType(JavaPlugin) {
    configurations {
      testClassesExported
    }

    artifacts {
      testClassesExported sourceSets.test.java.outputDir, {
        builtBy testClasses
      }
    }
  }
}

configure(project(":lucene:spatial-extras")) {
  plugins.withType(JavaPlugin) {
    dependencies {
      testImplementation project(path: ':lucene:spatial3d', configuration: 'testClassesExported')
    }
  }
}

configure(project(":solr:core")) {
  plugins.withType(JavaPlugin) {
    dependencies {
      testImplementation project(path: ':lucene:backward-codecs', configuration: 'testClassesExported')
      testImplementation project(path: ':lucene:queryparser', configuration: 'testClassesExported')
    }
  }
}

configure(project(":solr:contrib:analysis-extras")) {
  plugins.withType(JavaPlugin) {
    dependencies {
      testImplementation project(path: ':lucene:analysis:common', configuration: 'testClassesExported')
      testImplementation project(path: ':solr:contrib:dataimporthandler', configuration: 'testClassesExported')
    }
  }
}

configure(project(":solr:contrib:dataimporthandler-extras")) {
  plugins.withType(JavaPlugin) {
    dependencies {
      testImplementation project(path: ':solr:contrib:dataimporthandler', configuration: 'testClassesExported')
    }
  }
}
@@ -0,0 +1,12 @@
allprojects {
  apply plugin: 'idea'

  idea {
    module {
      outputDir file('build/idea/classes/main')
      testOutputDir file('build/idea/classes/test')
      downloadSources = true
    }
  }
}
@@ -0,0 +1,11 @@
// Configure Java project defaults.

allprojects {
  plugins.withType(JavaPlugin) {
    sourceCompatibility = "11"
    targetCompatibility = "11"

    compileJava.options.encoding = "UTF-8"
    compileTestJava.options.encoding = "UTF-8"
  }
}
@@ -0,0 +1,59 @@
// Configure javadoc defaults.

allprojects {
  plugins.withType(JavaPlugin) {
    tasks.matching { it.name == "javadoc" }.all {
      StandardJavadocDocletOptions opts = (options as StandardJavadocDocletOptions)
      opts.locale("en_US")
      opts.charSet = "UTF-8"
      opts.encoding = "UTF-8"
      opts.docEncoding = "UTF-8"

      opts.noIndex()
      opts.memberLevel = JavadocMemberLevel.PROTECTED
      opts.version = true
      opts.author = true
      opts.use = true

      opts.linksOffline(
          "https://docs.oracle.com/en/java/javase/11/docs/api/",
          project(":lucene").file("tools/javadoc/java11/").toString())

      opts.tags(
          "lucene.experimental:a:WARNING: This API is experimental and might change in incompatible ways in the next release.",
          "lucene.internal:a:NOTE: This API is for internal purposes only and might change in incompatible ways in the next release.",
          "lucene.spi:t:SPI Name (case-insensitive: if the name is 'htmlStrip', 'htmlstrip' can be used when looking up the service).",
      )

      opts.addStringOption("-release", "11")
      opts.addBooleanOption('Xdoclint:all,-missing,-accessibility,-html', true)

      def libName = project.path.startsWith(":lucene") ? "Lucene" : "Solr"
      opts.overview = file("src/main/java/overview.html").toString()
      opts.docTitle = "${libName} ${project.version} ${project.name} API"
      opts.windowTitle = "${libName} ${project.version} ${project.name} API"
      opts.bottom = "<i>Copyright © 2000-${buildYear} Apache Software Foundation. All Rights Reserved.</i>"
    }
  }
}

// https://issues.apache.org/jira/browse/LUCENE-9132: Add apache yetus so that javadoc doesn't fail
configure([
    project(":solr:solrj"),
    project(":solr:core"),
    project(":solr:test-framework"),
]) {
  configurations {
    javadocFix
  }

  dependencies {
    javadocFix("org.apache.yetus:audience-annotations:0.11.1")
  }

  plugins.withType(JavaPlugin) {
    javadoc {
      classpath += configurations.javadocFix.asFileTree
    }
  }
}
@@ -0,0 +1,32 @@
allprojects {
  apply plugin: 'base'

  group "org.apache"

  // Repositories to fetch dependencies from.
  repositories {
    mavenCentral()
    maven {
      url "https://maven.restlet.com"
    }
  }

  // Artifacts will have names after full gradle project path
  // so :solr:core will have solr-core.jar, etc.
  project.archivesBaseName = project.path.replaceAll("^:", "").replace(':', '-')

  ext {
    // Utility method to support passing overrides via -P or -D.
    propertyOrDefault = { propName, defValue ->
      def result
      if (project.hasProperty(propName)) {
        result = project.getProperty(propName)
      } else if (System.properties.containsKey(propName)) {
        result = System.properties.get(propName)
      } else {
        result = defValue
      }
      return result
    }
  }
}
@@ -0,0 +1,74 @@

// This script tries to guess sensible defaults for gradle parallelism
// and local machine's resources and save them under 'gradle.properties'.

def hasDefaults = rootProject.file("gradle.properties").exists()

// If we don't have the defaults yet, create them and re-run the build
// recursively with the same parameters as originally passed.
//
// Sadly, the recursive build doesn't seem to pick up the parallelism
// tweaks from gradle.properties file.

if (!hasDefaults) {
  configure(rootProject) {
    task setupLocalDefaultsOnce(type: GradleBuild) {
      // Approximate a common-sense default for running gradle with parallel
      // workers: half the count of available cpus but not more than 12.
      def cpus = Runtime.runtime.availableProcessors()
      def maxWorkers = (int) Math.max(1d, Math.min(cpus * 0.5d, 12))
      def testsJvms = (int) Math.max(1d, Math.min(cpus * 0.5d, 4))

      // Reuse the same set of parameters for the recursive invocation and apply
      // some of these eagerly.
      def startParams = gradle.startParameter.newInstance()
      startParams.setParallelProjectExecutionEnabled(true)
      startParams.setMaxWorkerCount(maxWorkers)
      startParameter(startParams)

      // Write the defaults for this machine.
      rootProject.file("gradle.properties").write(
          [
              "# These settings have been generated automatically on the first run.",
              "# See gradlew :helpLocalSettings for more information.",
              "systemProp.file.encoding=UTF-8",
              "org.gradle.daemon=true",
              "org.gradle.jvmargs=-Xmx1g",
              "org.gradle.parallel=true",
              "org.gradle.priority=normal",
              "org.gradle.warning.mode=none", // Silence gradle warnings. We'll deal with them when we upgrade the wrapper.
              "",
              "# Maximum number of parallel gradle workers.",
              "org.gradle.workers.max=${maxWorkers}",
              "",
              "# Maximum number of test JVMs forked per test task.",
              "tests.jvms=${testsJvms}"
          ].join("\n"), "UTF-8")

      doFirst {
        logger.log(LogLevel.WARN, "\nIMPORTANT. This is the first time you ran the build. " +
            "I wrote some sane defaults (for this machine) to 'gradle.properties', " +
            "they will be picked up on consecutive gradle invocations (not this one).\n\n" +
            "Run gradlew :helpLocalSettings for more information.")
      }
    }
  }

  // Disable any tasks in this build, they were forked recursively.
  gradle.taskGraph.whenReady { graph ->
    graph.allTasks.each { task ->
      if (task != rootProject.setupLocalDefaultsOnce) {
        task.enabled = false
      }
    }
  }

  // Make all tasks depend on local setup to make sure it'll run.
  allprojects {
    tasks.all { task ->
      if (task != rootProject.setupLocalDefaultsOnce) {
        task.dependsOn rootProject.setupLocalDefaultsOnce
      }
    }
  }
}
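The parallelism heuristic above (half the available CPUs, capped at 12 workers and 4 test JVMs, never below 1) is easy to check in isolation. A minimal Java sketch of the same arithmetic, for illustration only:

public class WorkerDefaultsSketch {
  // Half the available CPUs, but at least 1 and at most the given cap.
  static int halfCpusCapped(int cpus, int cap) {
    return (int) Math.max(1d, Math.min(cpus * 0.5d, cap));
  }

  public static void main(String[] args) {
    int cpus = Runtime.getRuntime().availableProcessors();
    int maxWorkers = halfCpusCapped(cpus, 12); // written as org.gradle.workers.max
    int testsJvms = halfCpusCapped(cpus, 4);   // written as tests.jvms

    System.out.println("cpus=" + cpus + " maxWorkers=" + maxWorkers + " testsJvms=" + testsJvms);
  }
}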
@@ -0,0 +1,47 @@
// Add "help" tasks which display plain text files under 'help' folder.

configure(rootProject) {
  def helpFiles = [
      ["Workflow", "help/workflow.txt", "Typical workflow commands."],
      ["Ant", "help/ant.txt", "Ant-gradle migration help."],
      ["Tests", "help/tests.txt", "Tests, filtering, beasting, etc."],
      ["Deps", "help/dependencies.txt", "Declaring, inspecting and excluding dependencies."],
      ["ForbiddenApis", "help/forbiddenApis.txt", "How to add/apply rules for forbidden APIs."],
      ["LocalSettings", "help/localSettings.txt", "Local settings, overrides and build performance tweaks."],
      ["Git", "help/git.txt", "Git assistance and guides."],
  ]

  helpFiles.each { section, path, sectionInfo ->
    task "help${section}" {
      group = 'Help (developer guides and hints)'
      description = sectionInfo
      doFirst {
        println "\n" + rootProject.file(path).getText("UTF-8")
      }
    }
  }

  help {
    doLast {
      println ""
      println "This is an experimental Lucene/Solr gradle build. See some"
      println "guidelines, ant-equivalent commands etc. under help/*; or type:"
      helpFiles.each { section, path, sectionInfo ->
        println String.format(Locale.ROOT,
            "  gradlew :help%-14s # %s", section, sectionInfo)
      }
    }
  }

  task allHelpFilesExit() {
    doFirst {
      helpFiles.each { section, path, sectionInfo ->
        if (!rootProject.file(path).exists()) {
          throw new GradleException("Help file missing: ${path} (correct help.gradle)")
        }
      }
    }
  }

  check.dependsOn allHelpFilesExit
}
@@ -0,0 +1,121 @@

// Maven publications and configuration.
//
// the 'published' list contains an explicit list of all projects
// which should be published to Maven repositories.

configure(rootProject) {
  ext {
    published = [
        ":lucene:analysis:common",
        ":lucene:analysis:icu",
        ":lucene:analysis:kuromoji",
        ":lucene:analysis:morfologik",
        ":lucene:analysis:nori",
        ":lucene:analysis:opennlp",
        ":lucene:analysis:phonetic",
        ":lucene:analysis:smartcn",
        ":lucene:analysis:stempel",
        ":lucene:backward-codecs",
        ":lucene:benchmark",
        ":lucene:classification",
        ":lucene:codecs",
        ":lucene:core",
        ":lucene:demo",
        ":lucene:expressions",
        ":lucene:facet",
        ":lucene:grouping",
        ":lucene:highlighter",
        ":lucene:join",
        ":lucene:luke",
        ":lucene:memory",
        ":lucene:misc",
        ":lucene:monitor",
        ":lucene:queries",
        ":lucene:queryparser",
        ":lucene:replicator",
        ":lucene:sandbox",
        ":lucene:spatial",
        ":lucene:spatial-extras",
        ":lucene:spatial3d",
        ":lucene:suggest",
        ":lucene:test-framework",

        ":solr:core",
        ":solr:solrj",
        ":solr:contrib:analysis-extras",
        ":solr:contrib:dataimporthandler",
        ":solr:contrib:dataimporthandler-extras",
        ":solr:contrib:analytics",
        ":solr:contrib:clustering",
        ":solr:contrib:extraction",
        ":solr:contrib:langid",
        ":solr:contrib:jaegertracer-configurator",
        ":solr:contrib:prometheus-exporter",
        ":solr:contrib:velocity",
        ":solr:test-framework",
    ]
  }

  configure(subprojects.findAll { it.path in rootProject.published }) {
    apply plugin: 'maven-publish'
    apply plugin: 'signing'

    publishing {
      // TODO: Add publishing repository details.
    }

    plugins.withType(JavaPlugin) {
      task sourcesJar(type: Jar, dependsOn: classes) {
        archiveClassifier = 'sources'
        from sourceSets.main.allJava
      }

      task javadocJar(type: Jar, dependsOn: javadoc) {
        archiveClassifier = 'javadoc'
        from javadoc.destinationDir
      }

      publishing {
        def configurePom = {
          name = "Apache Solr/Lucene (${project.name})"
          licenses {
            license {
              name = 'Apache 2'
              url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
            }
          }
        }

        publications {
          // JARS and sources, no javadocs (for local inspection only).
          jars(MavenPublication) {
            from components.java
            groupId = project.group
            artifactId = project.archivesBaseName

            artifact sourcesJar

            pom(configurePom)
          }

          // Full set of signed artifacts.
          signed(MavenPublication) {
            from components.java
            groupId = project.group
            artifactId = project.archivesBaseName

            artifact sourcesJar
            artifact javadocJar

            pom(configurePom)
          }
        }
      }

      signing {
        sign publishing.publications.signed
      }
    }
  }
}
@@ -0,0 +1,42 @@

// This adds a root project task to install all artifacts to a build-local
// Maven repository (so that pom files can be manually reviewed).

configure(rootProject) {
  ext {
    mavenLocalDir = file("${buildDir}/maven-local")
  }

  task mavenLocal() {
    group "Publishing"
    description "Publish Maven JARs and POMs locally to " + mavenLocalDir

    doLast {
      logger.lifecycle "Local maven artifacts (poms, jars) created at: ${mavenLocalDir}"
    }
  }

  task mavenLocalClean(type: Delete) {
    delete mavenLocalDir
  }

  configure(subprojects.findAll { it.path in rootProject.published }) {
    plugins.withType(PublishingPlugin) {
      publishing {
        repositories {
          maven {
            name = 'build'
            url = mavenLocalDir
          }
        }
      }

      tasks.matching { it.name == "publishJarsPublicationToBuildRepository" }.all { task ->
        // Clean prior to republishing to local build repository.
        task.dependsOn mavenLocalClean
        // Attach to root project's mavenLocal task.
        mavenLocal.dependsOn task
      }
    }
  }
}
@@ -0,0 +1,105 @@
import org.apache.tools.ant.taskdefs.condition.Os
import org.gradle.api.tasks.testing.logging.*
import org.apache.lucene.gradle.ErrorReportingTestListener

def verboseModeHookInstalled = false

allprojects {
  plugins.withType(JavaPlugin) {
    def verboseMode = Boolean.parseBoolean(propertyOrDefault("tests.verbose", "false"))

    project.ext {
      testsWorkDir = file("${buildDir}/tmp/tests-cwd")
      testsTmpDir = file("${buildDir}/tmp/tests-tmp")
      commonDir = project(":lucene").projectDir
      commonSolrDir = project(":solr").projectDir
    }

    // If we're running in verbose mode and:
    // 1) worker count > 1
    // 2) number of 'test' tasks in the build is > 1
    // then the output would very likely be mangled on the
    // console. Fail and let the user know what to do.
    if (verboseMode && !verboseModeHookInstalled) {
      verboseModeHookInstalled = true
      if (gradle.startParameter.maxWorkerCount > 1) {
        gradle.taskGraph.whenReady { graph ->
          def testTasks = graph.allTasks.findAll { task -> task instanceof Test }
          if (testTasks.size() > 1) {
            throw new GradleException("Run your tests in verbose mode only with --max-workers=1 option passed to gradle.")
          }
        }
      }
    }

    test {
      if (verboseMode) {
        maxParallelForks = 1
      } else {
        maxParallelForks = propertyOrDefault("tests.jvms", (int) Math.max(1, Math.min(Runtime.runtime.availableProcessors() / 2.0, 4.0)))
      }

      workingDir testsWorkDir
      useJUnit()

      minHeapSize = "256m"
      maxHeapSize = "512m"

      systemProperty 'java.util.logging.config.file', file("${commonDir}/tools/junit4/logging.properties")
      systemProperty 'java.awt.headless', 'true'
      systemProperty 'jdk.map.althashing.threshold', '0'

      if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
        systemProperty 'java.security.egd', 'file:/dev/./urandom'
      }

      // jetty-related.
      systemProperty 'jetty.testMode', '1'
      systemProperty 'jetty.insecurerandom', '1'

      // Turn jenkins blood red for hashmap bugs, even on jdk7
      systemProperty 'jdk.map.althashing.threshold', '0'

      // Pass these to RandomizedRunner so that it doesn't attempt to set them.
      systemProperty 'junit4.childvm.count', '1'
      systemProperty 'junit4.childvm.id', '0'

      // Set up cwd and temp locations.
      systemProperty("java.io.tmpdir", testsTmpDir)
      systemProperty("tempDir", testsTmpDir)
      doFirst {
        testsWorkDir.mkdirs()
        testsTmpDir.mkdirs()
      }

      // Disable HTML report generation. The reports are big and slow to generate.
      reports.html.enabled = false

      // Set up logging.
      testLogging {
        events TestLogEvent.FAILED
        exceptionFormat TestExceptionFormat.FULL
        showExceptions true
        showCauses true
        showStackTraces true
        showStandardStreams false
      }

      // Set up custom test output handler.
      def testOutputsDir = file("${reports.junitXml.destination}/outputs")
      doFirst {
        project.delete testOutputsDir
      }

      def spillDir = getTemporaryDir().toPath()
      def listener = new ErrorReportingTestListener(test.testLogging, spillDir, testOutputsDir.toPath(), verboseMode)
      addTestOutputListener(listener)
      addTestListener(listener)

      doFirst {
        // Print some diagnostics about locations used.
        logger.info("Test folders for {}: cwd={}, tmp={}", project.path, testsWorkDir, testsTmpDir)
      }
    }
  }
}
@ -0,0 +1,45 @@
|
|||
// If we run the test task with a filter, we want to fail if no test actually ran (everything was excluded).
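// Illustration only (the test class name below is hypothetical): a filtered run such as
//   gradlew -p lucene/core test --tests "TestDemo"
// should trip the check below and fail the build if nothing matches the filter.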
|
||||
|
||||
configure(allprojects) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
test {
|
||||
filter {
|
||||
failOnNoMatchingTests = false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gradle.taskGraph.whenReady { graph ->
|
||||
def args = gradle.startParameter.taskNames
|
||||
def filters = args.findAll({ arg ->
|
||||
return arg == /--tests/
|
||||
})
|
||||
|
||||
// Only apply the check if we are actually filtering.
|
||||
if (!filters.isEmpty()) {
|
||||
def testTasks = graph.allTasks.findAll { task -> task instanceof Test }
|
||||
// ... and there are some test tasks in the execution graph.
|
||||
if (!testTasks.isEmpty()) {
|
||||
def executedTests = 0
|
||||
def executedTasks = 0
|
||||
|
||||
testTasks.each { task ->
|
||||
task.doFirst {
|
||||
executedTasks++
|
||||
}
|
||||
task.afterSuite { desc, result ->
|
||||
executedTests += result.testCount
|
||||
}
|
||||
}
|
||||
|
||||
// After the build is finished, check the test count.
|
||||
gradle.buildFinished {
|
||||
if (executedTests == 0 && executedTasks > 0) {
|
||||
throw new GradleException("No tests found for the given filters?")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
// Display all failed tests at the end of the build.
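// Illustration only (test and project names are hypothetical): a summary entry printed
// by the hook below looks roughly like
//   - org.apache.lucene.index.TestDemo.testSomething (:lucene:core)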
|
||||
|
||||
def failedTests = []
|
||||
|
||||
allprojects {
|
||||
tasks.withType(Test) { Test task ->
|
||||
afterTest { desc, result ->
|
||||
if (result.resultType == TestResult.ResultType.FAILURE) {
|
||||
failedTests << [
|
||||
"name": "${desc.className}.${desc.name}",
|
||||
"project": "${test.project.path}",
|
||||
"reproduce": "gradlew ${project.path}:test --tests \"${desc.className}\" ${task.project.testOptionsForReproduceLine}"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
afterSuite { desc, result ->
|
||||
if (result.exceptions) {
|
||||
failedTests << [
|
||||
"name": "${desc.name}",
|
||||
"project": "${test.project.path}",
|
||||
"reproduce": "gradlew ${project.path}:test --tests \"${desc.name}\" ${task.project.testOptionsForReproduceLine}"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gradle.buildFinished { result ->
|
||||
if (failedTests) {
|
||||
def formatted = failedTests
|
||||
.sort { a, b -> b.project.compareTo(a.project) }
|
||||
.collect { e -> String.format(Locale.ROOT, " - %s (%s)\n Minimum reproduce line: %s\n", e.name, e.project, e.reproduce) }
|
||||
.join("\n")
|
||||
|
||||
logger.error("\nERROR: The following test(s) have failed:\n${formatted}")
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
|
||||
// Per-project test summary.
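// Illustration only (the numbers are made up): the hook below prints lines such as
//   :lucene:core:test (SUCCESS): 4512 test(s), 13 skipped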
|
||||
|
||||
allprojects {
|
||||
tasks.withType(Test) { task ->
|
||||
afterSuite { desc, result ->
|
||||
if (!desc.parent) {
|
||||
if (result.testCount > 0) {
|
||||
def components = [
|
||||
"test(s)" : result.testCount,
|
||||
"failure(s)": result.failedTestCount,
|
||||
"skipped" : result.skippedTestCount
|
||||
].findAll { k, v -> v > 0 }.collect { k, v -> "$v $k" }.join(", ")
|
||||
|
||||
logger.lifecycle("${task.path} (${result.resultType}): ${components}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Policy file for :lucene:replicator tests. Please keep minimal and avoid wildcards.
|
||||
|
||||
grant {
|
||||
// 3rd party jar resources (where symlinks are not supported), test-files/ resources
|
||||
permission java.io.FilePermission "${common.dir}${/}-", "read";
|
||||
// system jar resources, and let TestIndexWriterOnJRECrash fork its jvm
|
||||
permission java.io.FilePermission "${java.home}${/}-", "read,execute";
|
||||
|
||||
// write only to sandbox
|
||||
permission java.io.FilePermission "${java.io.tmpdir}", "read,write";
|
||||
permission java.io.FilePermission "${java.io.tmpdir}${/}-", "read,write,delete";
|
||||
permission java.io.FilePermission "${tests.linedocsfile}", "read";
|
||||
|
||||
// misc HardlinkCopyDirectoryWrapper needs this to test if hardlinks can be created
|
||||
permission java.nio.file.LinkPermission "hard";
|
||||
// needed by SSD detection tests in TestIOUtils (creates symlinks)
|
||||
permission java.nio.file.LinkPermission "symbolic";
|
||||
|
||||
// needed by randomizedtesting runner to identify test methods.
|
||||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
|
||||
// needed by certain tests to redirect sysout/syserr:
|
||||
permission java.lang.RuntimePermission "setIO";
|
||||
// needed by randomized runner to catch failures from other threads:
|
||||
permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
|
||||
// needed by randomized runner getTopThreadGroup:
|
||||
permission java.lang.RuntimePermission "modifyThreadGroup";
|
||||
// needed by tests e.g. shutting down executors:
|
||||
permission java.lang.RuntimePermission "modifyThread";
|
||||
// needed for tons of test hacks etc
|
||||
permission java.lang.RuntimePermission "getStackTrace";
|
||||
// needed for mock filesystems in tests
|
||||
permission java.lang.RuntimePermission "fileSystemProvider";
|
||||
// needed for test of IOUtils.spins (maybe it can be avoided)
|
||||
permission java.lang.RuntimePermission "getFileStoreAttributes";
|
||||
// analyzers/uima: needed by lucene expressions' JavascriptCompiler
|
||||
permission java.lang.RuntimePermission "createClassLoader";
|
||||
// needed to test unmap hack on platforms that support it
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
|
||||
// needed by cyberneko usage by benchmarks on J9
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.org.apache.xerces.util";
|
||||
// needed by jacoco to dump coverage
|
||||
permission java.lang.RuntimePermission "shutdownHooks";
|
||||
// needed by org.apache.logging.log4j
|
||||
permission java.lang.RuntimePermission "getenv.*";
|
||||
permission java.lang.RuntimePermission "getClassLoader";
|
||||
permission java.lang.RuntimePermission "setContextClassLoader";
|
||||
|
||||
// allows LuceneTestCase#runWithRestrictedPermissions to execute with lower (or no) permission
|
||||
permission java.security.SecurityPermission "createAccessControlContext";
|
||||
|
||||
// read access to all system properties.
|
||||
permission java.util.PropertyPermission "*", "read";
|
||||
|
||||
// write access to only these.
|
||||
|
||||
// environment randomization
|
||||
permission java.util.PropertyPermission "user.language", "write";
|
||||
permission java.util.PropertyPermission "user.timezone", "write";
|
||||
|
||||
// CMS randomization
|
||||
permission java.util.PropertyPermission "lucene.cms.override_core_count", "write";
|
||||
permission java.util.PropertyPermission "lucene.cms.override_spins", "write";
|
||||
|
||||
// used by nested tests? (e.g. TestLeaveFilesIfTestFails). TODO: look into this
|
||||
permission java.util.PropertyPermission "tests.runnested", "write";
|
||||
|
||||
// Used by LuceneTestCase to set up TestRuleRestoreSystemProperties.
|
||||
// TODO: move it to Solr or modify TestRuleRestoreSystemProperties not to fail on inaccessible properties?
|
||||
permission java.util.PropertyPermission "solr.data.dir", "write";
|
||||
permission java.util.PropertyPermission "solr.solr.home", "write";
|
||||
permission java.util.PropertyPermission "solr.directoryFactory", "write";
|
||||
|
||||
// this section is replicator-specific (jetty)
|
||||
|
||||
// write access to all system properties
|
||||
permission java.util.PropertyPermission "*", "write";
|
||||
|
||||
// replicator: jetty tests require some network permissions:
|
||||
// all possibilities of accepting/binding/connecting on localhost with ports >= 1024:
|
||||
permission java.net.SocketPermission "localhost:1024-", "accept,listen,connect,resolve";
|
||||
permission java.net.SocketPermission "127.0.0.1:1024-", "accept,listen,connect,resolve";
|
||||
permission java.net.SocketPermission "[::1]:1024-", "accept,listen,connect,resolve";
|
||||
|
||||
// SSL related properties for jetty
|
||||
permission java.security.SecurityPermission "getProperty.ssl.KeyManagerFactory.algorithm";
|
||||
permission java.security.SecurityPermission "getProperty.ssl.TrustManagerFactory.algorithm";
|
||||
};
|
||||
|
||||
|
||||
// Grant all permissions to Gradle test runner classes.
|
||||
|
||||
grant codeBase "file:${gradle.lib.dir}${/}-" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
grant codeBase "file:${gradle.worker.jar}" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
grant {
|
||||
// Allow reading gradle worker JAR.
|
||||
permission java.io.FilePermission "${gradle.worker.jar}", "read";
|
||||
// Allow reading from classpath JARs (resources).
|
||||
permission java.io.FilePermission "${gradle.user.home}${/}-", "read";
|
||||
};
|
|
@ -0,0 +1,217 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Policy file for solr. Please keep minimal and avoid wildcards.
|
||||
|
||||
// permissions needed for tests to pass, based on properties set by the build system
|
||||
// NOTE: if the property is not set, the permission entry is ignored.
|
||||
grant {
|
||||
// 3rd party jar resources (where symlinks are not supported), test-files/ resources
|
||||
permission java.io.FilePermission "${common.dir}${/}-", "read";
|
||||
permission java.io.FilePermission "${common.dir}${/}..${/}solr${/}-", "read";
|
||||
|
||||
// system jar resources
|
||||
permission java.io.FilePermission "${java.home}${/}-", "read";
|
||||
|
||||
// Test launchers (randomizedtesting, etc.)
|
||||
permission java.io.FilePermission "${java.io.tmpdir}", "read,write";
|
||||
permission java.io.FilePermission "${java.io.tmpdir}${/}-", "read,write,delete";
|
||||
|
||||
permission java.io.FilePermission "${tests.linedocsfile}", "read";
|
||||
// DirectoryFactoryTest messes with these (wtf?)
|
||||
permission java.io.FilePermission "/tmp/inst1/conf/solrcore.properties", "read";
|
||||
permission java.io.FilePermission "/path/to/myinst/conf/solrcore.properties", "read";
|
||||
// TestConfigSets messes with these (wtf?)
|
||||
permission java.io.FilePermission "/path/to/solr/home/lib", "read";
|
||||
|
||||
permission java.nio.file.LinkPermission "hard";
|
||||
|
||||
// all possibilities of accepting/binding/connections on localhost with ports >=1024:
|
||||
permission java.net.SocketPermission "localhost:1024-", "accept,listen,connect,resolve";
|
||||
permission java.net.SocketPermission "127.0.0.1:1024-", "accept,listen,connect,resolve";
|
||||
permission java.net.SocketPermission "[::1]:1024-", "accept,listen,connect,resolve";
|
||||
// "dead hosts", we try to keep it fast
|
||||
permission java.net.SocketPermission "[::1]:4", "connect,resolve";
|
||||
permission java.net.SocketPermission "[::1]:6", "connect,resolve";
|
||||
permission java.net.SocketPermission "[::1]:8", "connect,resolve";
|
||||
|
||||
// Basic permissions needed for Lucene to work:
|
||||
permission java.util.PropertyPermission "*", "read,write";
|
||||
|
||||
// needed by randomizedtesting runner to identify test methods.
|
||||
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
|
||||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
// needed by certain tests to redirect sysout/syserr:
|
||||
permission java.lang.RuntimePermission "setIO";
|
||||
// needed by randomized runner to catch failures from other threads:
|
||||
permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
|
||||
// needed by randomized runner getTopThreadGroup:
|
||||
permission java.lang.RuntimePermission "modifyThreadGroup";
|
||||
// needed by tests e.g. shutting down executors:
|
||||
permission java.lang.RuntimePermission "modifyThread";
|
||||
// needed for tons of test hacks etc
|
||||
permission java.lang.RuntimePermission "getStackTrace";
|
||||
// needed for mock filesystems in tests
|
||||
permission java.lang.RuntimePermission "fileSystemProvider";
|
||||
// needed for test of IOUtils.spins (maybe it can be avoided)
|
||||
permission java.lang.RuntimePermission "getFileStoreAttributes";
|
||||
// analyzers/uima: needed by lucene expressions' JavascriptCompiler
|
||||
permission java.lang.RuntimePermission "createClassLoader";
|
||||
// needed to test unmap hack on platforms that support it
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
|
||||
// needed by jacoco to dump coverage
|
||||
permission java.lang.RuntimePermission "shutdownHooks";
|
||||
// needed by org.apache.logging.log4j
|
||||
permission java.lang.RuntimePermission "getenv.*";
|
||||
permission java.lang.RuntimePermission "getClassLoader";
|
||||
permission java.lang.RuntimePermission "setContextClassLoader";
|
||||
permission java.lang.RuntimePermission "getStackWalkerWithClassReference";
|
||||
// needed by bytebuddy
|
||||
permission java.lang.RuntimePermission "defineClass";
|
||||
// needed by mockito
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
|
||||
permission java.lang.RuntimePermission "reflectionFactoryAccess";
|
||||
// needed by SolrResourceLoader
|
||||
permission java.lang.RuntimePermission "closeClassLoader";
|
||||
// needed by HttpSolrClient
|
||||
permission java.lang.RuntimePermission "getFileSystemAttributes";
|
||||
// needed by hadoop auth (TODO: there is a cleaner way to handle this)
|
||||
permission java.lang.RuntimePermission "loadLibrary.jaas";
|
||||
permission java.lang.RuntimePermission "loadLibrary.jaas_unix";
|
||||
permission java.lang.RuntimePermission "loadLibrary.jaas_nt";
|
||||
// needed by hadoop common RawLocalFileSystem for java nio getOwner
|
||||
permission java.lang.RuntimePermission "accessUserInformation";
|
||||
// needed by hadoop hdfs
|
||||
permission java.lang.RuntimePermission "readFileDescriptor";
|
||||
permission java.lang.RuntimePermission "writeFileDescriptor";
|
||||
// needed by hadoop http
|
||||
permission java.lang.RuntimePermission "getProtectionDomain";
|
||||
|
||||
// These two *have* to be spelled out separately
|
||||
permission java.lang.management.ManagementPermission "control";
|
||||
permission java.lang.management.ManagementPermission "monitor";
|
||||
|
||||
// needed by hadoop htrace
|
||||
permission java.net.NetPermission "getNetworkInformation";
|
||||
|
||||
// needed by DIH
|
||||
permission java.sql.SQLPermission "deregisterDriver";
|
||||
|
||||
permission java.util.logging.LoggingPermission "control";
|
||||
|
||||
// needed by solr mbeans feature/tests
|
||||
// TODO: can we remove wildcard for class names/members?
|
||||
permission javax.management.MBeanPermission "*", "getAttribute";
|
||||
permission javax.management.MBeanPermission "*", "getMBeanInfo";
|
||||
permission javax.management.MBeanPermission "*", "queryMBeans";
|
||||
permission javax.management.MBeanPermission "*", "queryNames";
|
||||
permission javax.management.MBeanPermission "*", "registerMBean";
|
||||
permission javax.management.MBeanPermission "*", "unregisterMBean";
|
||||
permission javax.management.MBeanServerPermission "createMBeanServer";
|
||||
permission javax.management.MBeanServerPermission "findMBeanServer";
|
||||
permission javax.management.MBeanServerPermission "releaseMBeanServer";
|
||||
permission javax.management.MBeanTrustPermission "register";
|
||||
|
||||
// needed by hadoop auth
|
||||
permission javax.security.auth.AuthPermission "getSubject";
|
||||
permission javax.security.auth.AuthPermission "modifyPrincipals";
|
||||
permission javax.security.auth.AuthPermission "doAs";
|
||||
permission javax.security.auth.AuthPermission "getLoginConfiguration";
|
||||
permission javax.security.auth.AuthPermission "setLoginConfiguration";
|
||||
permission javax.security.auth.AuthPermission "modifyPrivateCredentials";
|
||||
permission javax.security.auth.AuthPermission "modifyPublicCredentials";
|
||||
permission javax.security.auth.PrivateCredentialPermission "org.apache.hadoop.security.Credentials * \"*\"", "read";
|
||||
|
||||
// needed by hadoop security
|
||||
permission java.security.SecurityPermission "putProviderProperty.SaslPlainServer";
|
||||
permission java.security.SecurityPermission "insertProvider";
|
||||
|
||||
permission javax.xml.bind.JAXBPermission "setDatatypeConverter";
|
||||
|
||||
// SSL related properties for Solr tests
|
||||
permission javax.net.ssl.SSLPermission "setDefaultSSLContext";
|
||||
|
||||
// SASL/Kerberos related properties for Solr tests
|
||||
permission javax.security.auth.PrivateCredentialPermission "javax.security.auth.kerberos.KerberosTicket * \"*\"", "read";
|
||||
|
||||
// may only be necessary with Java 7?
|
||||
permission javax.security.auth.PrivateCredentialPermission "javax.security.auth.kerberos.KeyTab * \"*\"", "read";
|
||||
permission javax.security.auth.PrivateCredentialPermission "sun.security.jgss.krb5.Krb5Util$KeysFromKeyTab * \"*\"", "read";
|
||||
|
||||
permission javax.security.auth.kerberos.ServicePermission "*", "initiate";
|
||||
permission javax.security.auth.kerberos.ServicePermission "*", "accept";
|
||||
permission javax.security.auth.kerberos.DelegationPermission "\"*\" \"krbtgt/EXAMPLE.COM@EXAMPLE.COM\"";
|
||||
|
||||
// Java 8 accessibility requires this perm - it should not be needed after 8, I believe (rrd4j is the root reason we hit an accessibility code path)
|
||||
permission java.awt.AWTPermission "*";
|
||||
|
||||
// used by solr to create sandboxes (e.g. script execution)
|
||||
permission java.security.SecurityPermission "createAccessControlContext";
|
||||
};
|
||||
|
||||
// additional permissions based on system properties set by /bin/solr
|
||||
// NOTE: if the property is not set, the permission entry is ignored.
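// For example (the property name is taken from the entries below): if solr.jetty.keystore is not
// defined at runtime, its FilePermission entries are simply ignored rather than causing a failure.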
|
||||
grant {
|
||||
permission java.io.FilePermission "${hadoop.security.credential.provider.path}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${hadoop.security.credential.provider.path}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.jetty.keystore}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.jetty.keystore}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.jetty.truststore}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.jetty.truststore}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.install.dir}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.install.dir}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${jetty.home}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${jetty.home}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.solr.home}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.solr.home}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.data.home}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.data.home}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.default.confdir}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.default.confdir}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.log.dir}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.log.dir}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${log4j.configurationFile}", "read,write,delete,readlink";
|
||||
|
||||
// expanded to a wildcard if set, allows all networking everywhere
|
||||
permission java.net.SocketPermission "${solr.internal.network.permission}", "accept,listen,connect,resolve";
|
||||
};
|
||||
|
||||
// Grant all permissions to Gradle test runner classes.
|
||||
|
||||
grant codeBase "file:${gradle.lib.dir}${/}-" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
grant codeBase "file:${gradle.worker.jar}" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
grant {
|
||||
// Allow reading gradle worker JAR.
|
||||
permission java.io.FilePermission "${gradle.worker.jar}", "read";
|
||||
// Allow reading from classpath JARs (resources).
|
||||
permission java.io.FilePermission "${gradle.user.home}${/}-", "read";
|
||||
};
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Policy file for lucene tests. Please keep minimal and avoid wildcards.
|
||||
|
||||
grant {
|
||||
// 3rd party jar resources (where symlinks are not supported), test-files/ resources
|
||||
permission java.io.FilePermission "${common.dir}${/}-", "read";
|
||||
// system jar resources, and let TestIndexWriterOnJRECrash fork its jvm
|
||||
permission java.io.FilePermission "${java.home}${/}-", "read,execute";
|
||||
|
||||
// write only to sandbox
|
||||
permission java.io.FilePermission "${java.io.tmpdir}", "read,write";
|
||||
permission java.io.FilePermission "${java.io.tmpdir}${/}-", "read,write,delete";
|
||||
permission java.io.FilePermission "${tests.linedocsfile}", "read";
|
||||
|
||||
// misc HardlinkCopyDirectoryWrapper needs this to test if hardlinks can be created
|
||||
permission java.nio.file.LinkPermission "hard";
|
||||
// needed by SSD detection tests in TestIOUtils (creates symlinks)
|
||||
permission java.nio.file.LinkPermission "symbolic";
|
||||
|
||||
// needed by randomizedtesting runner to identify test methods.
|
||||
permission java.lang.RuntimePermission "accessDeclaredMembers";
|
||||
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
|
||||
// needed by certain tests to redirect sysout/syserr:
|
||||
permission java.lang.RuntimePermission "setIO";
|
||||
// needed by randomized runner to catch failures from other threads:
|
||||
permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
|
||||
// needed by randomized runner getTopThreadGroup:
|
||||
permission java.lang.RuntimePermission "modifyThreadGroup";
|
||||
// needed by tests e.g. shutting down executors:
|
||||
permission java.lang.RuntimePermission "modifyThread";
|
||||
// needed for tons of test hacks etc
|
||||
permission java.lang.RuntimePermission "getStackTrace";
|
||||
// needed for mock filesystems in tests
|
||||
permission java.lang.RuntimePermission "fileSystemProvider";
|
||||
// needed for test of IOUtils.spins (maybe it can be avoided)
|
||||
permission java.lang.RuntimePermission "getFileStoreAttributes";
|
||||
// analyzers/uima: needed by lucene expressions' JavascriptCompiler
|
||||
permission java.lang.RuntimePermission "createClassLoader";
|
||||
// needed to test unmap hack on platforms that support it
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
|
||||
// needed by cyberneko usage by benchmarks on J9
|
||||
permission java.lang.RuntimePermission "accessClassInPackage.org.apache.xerces.util";
|
||||
// needed by jacoco to dump coverage
|
||||
permission java.lang.RuntimePermission "shutdownHooks";
|
||||
// needed by org.apache.logging.log4j
|
||||
permission java.lang.RuntimePermission "getenv.*";
|
||||
permission java.lang.RuntimePermission "getClassLoader";
|
||||
permission java.lang.RuntimePermission "setContextClassLoader";
|
||||
|
||||
// allows LuceneTestCase#runWithRestrictedPermissions to execute with lower (or no) permission
|
||||
permission java.security.SecurityPermission "createAccessControlContext";
|
||||
|
||||
// read access to all system properties.
|
||||
permission java.util.PropertyPermission "*", "read";
|
||||
|
||||
// write access to only these.
|
||||
|
||||
// environment randomization
|
||||
permission java.util.PropertyPermission "user.language", "write";
|
||||
permission java.util.PropertyPermission "user.timezone", "write";
|
||||
|
||||
// CMS randomization
|
||||
permission java.util.PropertyPermission "lucene.cms.override_core_count", "write";
|
||||
permission java.util.PropertyPermission "lucene.cms.override_spins", "write";
|
||||
|
||||
// used by nested tests? (e.g. TestLeaveFilesIfTestFails). TODO: look into this
|
||||
permission java.util.PropertyPermission "tests.runnested", "write";
|
||||
|
||||
// Used by LuceneTestCase to set up TestRuleRestoreSystemProperties.
|
||||
// TODO: move it to Solr or modify TestRuleRestoreSystemProperties not to fail on inaccessible properties?
|
||||
permission java.util.PropertyPermission "solr.data.dir", "write";
|
||||
permission java.util.PropertyPermission "solr.solr.home", "write";
|
||||
permission java.util.PropertyPermission "solr.directoryFactory", "write";
|
||||
};
|
||||
|
||||
|
||||
// Grant all permissions to Gradle test runner classes.
|
||||
|
||||
grant codeBase "file:${gradle.lib.dir}${/}-" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
grant codeBase "file:${gradle.worker.jar}" {
|
||||
permission java.security.AllPermission;
|
||||
};
|
||||
|
||||
grant {
|
||||
// Allow reading gradle worker JAR.
|
||||
permission java.io.FilePermission "${gradle.worker.jar}", "read";
|
||||
// Allow reading from classpath JARs (resources).
|
||||
permission java.io.FilePermission "${gradle.user.home}${/}-", "read";
|
||||
};
|
|
@ -0,0 +1,237 @@
|
|||
//
|
||||
// Configure test randomization seeds and derived test properties.
|
||||
//
|
||||
|
||||
import java.nio.file.*
|
||||
import com.carrotsearch.randomizedtesting.SeedUtils
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks
|
||||
|
||||
buildscript {
|
||||
repositories {
|
||||
mavenCentral()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
classpath 'com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.7.2'
|
||||
}
|
||||
}
|
||||
|
||||
// Pick the "root" seed from which everything else is derived.
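// Hedged usage sketch (the seed value is arbitrary): a run can be pinned to a fixed seed with
//   gradlew test -Ptests.seed=DEADBEEF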
|
||||
configure(rootProject) {
|
||||
ext {
|
||||
rootSeed = propertyOrDefault('tests.seed', String.format("%08X", new Random().nextLong()))
|
||||
rootSeedLong = SeedUtils.parseSeedChain(rootSeed)[0]
|
||||
projectSeedLong = rootSeedLong ^ project.path.hashCode()
|
||||
|
||||
// "base" version is stripped of the qualifier. Compute it.
|
||||
baseVersion = {
|
||||
def m = (rootProject.version =~ /^(\d+\.\d+\.\d+)(-(.+))?/)
|
||||
if (!m) {
|
||||
throw new GradleException("Can't strip version to just x.y.z: " + rootProject.version)
|
||||
}
|
||||
return m[0][1]
|
||||
}()
|
||||
}
|
||||
|
||||
task randomizationInfo() {
|
||||
doFirst {
|
||||
logger.lifecycle("Running tests with randomization seed: tests.seed=${rootSeed}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Any test task will trigger display of randomization settings.
|
||||
allprojects {
|
||||
tasks.withType(Test) { task ->
|
||||
task.dependsOn rootProject.randomizationInfo
|
||||
}
|
||||
}
|
||||
|
||||
// Configure test property defaults and their descriptions.
|
||||
allprojects {
|
||||
plugins.withType(JavaPlugin) {
|
||||
ext {
|
||||
testOptions = [
|
||||
// seed, repetition and amplification.
|
||||
[propName: 'tests.seed', value: "random", description: "Sets the master randomization seed."],
|
||||
[propName: 'tests.iters', value: null, description: "Duplicate (re-run) each test N times."],
|
||||
[propName: 'tests.multiplier', value: 1, description: "Value multiplier for randomized tests."],
|
||||
[propName: 'tests.maxfailures', value: null, description: "Skip tests after a given number of failures."],
|
||||
[propName: 'tests.timeoutSuite', value: null, description: "Timeout (in millis) for an entire suite."],
|
||||
[propName: 'tests.failfast', value: "false", description: "Stop the build early on failure.", buildOnly: true],
|
||||
// asserts, debug output.
|
||||
[propName: 'tests.asserts', value: "true", description: "Enables or disables assertions mode."],
|
||||
[propName: 'tests.verbose', value: false, description: "Emit verbose debug information from tests."],
|
||||
[propName: 'tests.infostream', value: false, description: "Enables or disables infostream logs."],
|
||||
[propName: 'tests.leaveTemporary', value: null, description: "Leave temporary directories after tests complete."],
|
||||
[propName: 'tests.useSecurityManager', value: true, description: "Control security manager in tests.", buildOnly: true],
|
||||
// component randomization
|
||||
[propName: 'tests.codec', value: "random", description: "Sets the codec tests should run with."],
|
||||
[propName: 'tests.directory', value: "random", description: "Sets the Directory implementation tests should run with."],
|
||||
[propName: 'tests.postingsformat', value: "random", description: "Sets the postings format tests should run with."],
|
||||
[propName: 'tests.docvaluesformat', value: "random", description: "Sets the doc values format tests should run with."],
|
||||
[propName: 'tests.locale', value: "random", description: "Sets the default locale tests should run with."],
|
||||
[propName: 'tests.timezone', value: "random", description: "Sets the default time zone tests should run with."],
|
||||
// filtering
|
||||
[propName: 'tests.filter', value: null, description: "Applies a test filter (see :helpTests)."],
|
||||
[propName: 'tests.slow', value: true, description: "Enables or disables @Slow tests."],
|
||||
[propName: 'tests.nightly', value: false, description: "Enables or disables @Nightly tests."],
|
||||
[propName: 'tests.weekly', value: false, description: "Enables or disables @Weekly tests."],
|
||||
[propName: 'tests.monster', value: false, description: "Enables or disables @Monster tests."],
|
||||
[propName: 'tests.awaitsfix', value: null, description: "Enables or disables @AwaitsFix tests."],
|
||||
[propName: 'tests.file.encoding', value: "random", description: "Sets the default file.encoding on test JVM.", buildOnly: true],
|
||||
// test data
|
||||
[propName: 'tests.linedocsfile', value: 'europarl.lines.txt.gz', description: "Test data file path."],
|
||||
// miscellaneous; some of them very weird.
|
||||
[propName: 'tests.LUCENE_VERSION', value: baseVersion, description: "Base Lucene version."],
|
||||
[propName: 'tests.bwcdir', value: null, description: "Data for backward-compatibility indexes."],
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add Solr-specific test configs settings.
|
||||
configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
ext {
|
||||
testOptions += [
|
||||
[propName: 'tests.luceneMatchVersion', value: baseVersion, description: "Base Lucene version."],
|
||||
[propName: 'common-solr.dir', value: file("${commonDir}/../solr").path, description: "Solr base dir."],
|
||||
[propName: 'solr.directoryFactory', value: "org.apache.solr.core.MockDirectoryFactory", description: "Solr directory factory."],
|
||||
[propName: 'tests.src.home', value: null, description: "See SOLR-14023."],
|
||||
[propName: 'solr.tests.use.numeric.points', value: null, description: "Point implementation to use (true=numerics, false=trie)."],
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve test option values after all evaluation is complete.
|
||||
allprojects {
|
||||
plugins.withType(JavaPlugin) {
|
||||
afterEvaluate {
|
||||
ext.testOptionsResolved = testOptions.findAll { opt ->
|
||||
propertyOrDefault(opt.propName, opt.value) != null
|
||||
}.collectEntries { opt ->
|
||||
[(opt.propName): Objects.toString(propertyOrDefault(opt.propName, opt.value))]
|
||||
}
|
||||
|
||||
// These are not official options or dynamically seed-derived options.
|
||||
if (testOptionsResolved['tests.file.encoding'] == 'random') {
|
||||
testOptionsResolved['tests.file.encoding'] = RandomPicks.randomFrom(
|
||||
new Random(projectSeedLong), [
|
||||
"US-ASCII", "ISO-8859-1", "UTF-8"
|
||||
])
|
||||
}
|
||||
|
||||
if (testOptionsResolved['tests.seed'] == 'random') {
|
||||
testOptionsResolved['tests.seed'] = rootSeed
|
||||
}
|
||||
|
||||
// Compute the "reproduce with" string.
|
||||
ext.testOptionsForReproduceLine = testOptions.findAll { opt ->
|
||||
def defValue = Objects.toString(opt.value, null)
|
||||
def value = testOptionsResolved[opt.propName]
|
||||
return defValue != value
|
||||
}.collect { opt ->
|
||||
"-P" + opt.propName + "=" + testOptionsResolved[opt.propName]
|
||||
}.join(" ")
|
||||
|
||||
// The "leave temporary folder" option has multiple aliases...
|
||||
if ([
|
||||
"tests.leaveTemporary",
|
||||
"tests.leavetemporary",
|
||||
"tests.leavetmpdir",
|
||||
"solr.test.leavetmpdir",
|
||||
].find { prop ->
|
||||
Boolean.parseBoolean(propertyOrDefault(prop, "false"))
|
||||
}) {
|
||||
testOptionsResolved['tests.leaveTemporary'] = "true"
|
||||
}
|
||||
|
||||
// Append resolved test properties to the test task.
|
||||
test {
|
||||
// TODO: we could remove opts with "buildOnly: true" (?)
|
||||
systemProperties testOptionsResolved
|
||||
|
||||
if (Boolean.parseBoolean(testOptionsResolved['tests.asserts'])) {
|
||||
jvmArgs("-ea", "-esa")
|
||||
} else {
|
||||
enableAssertions = false
|
||||
}
|
||||
|
||||
if (Boolean.parseBoolean(testOptionsResolved["tests.failfast"])) {
|
||||
failFast true
|
||||
}
|
||||
|
||||
// Enable security manager, if requested. We could move the selection of security manager and security policy
|
||||
// to each project's build/ configuration but it seems compact enough to keep it here for now.
|
||||
if (Boolean.parseBoolean(testOptionsResolved["tests.useSecurityManager"])) {
|
||||
if (project.path == ":lucene:replicator") {
|
||||
systemProperty 'java.security.manager', "org.apache.lucene.util.TestSecurityManager"
|
||||
systemProperty 'java.security.policy', rootProject.file("gradle/testing/policies/replicator-tests.policy")
|
||||
} else if (project.path.startsWith(":lucene")) {
|
||||
systemProperty 'java.security.manager', "org.apache.lucene.util.TestSecurityManager"
|
||||
systemProperty 'java.security.policy', rootProject.file("gradle/testing/policies/tests.policy")
|
||||
} else {
|
||||
systemProperty 'common-solr.dir', commonSolrDir
|
||||
systemProperty 'java.security.manager', "org.apache.lucene.util.TestSecurityManager"
|
||||
systemProperty 'java.security.policy', rootProject.file("gradle/testing/policies/solr-tests.policy")
|
||||
}
|
||||
|
||||
systemProperty 'common.dir', commonDir
|
||||
|
||||
def gradleUserHome = project.gradle.getGradleUserHomeDir()
|
||||
systemProperty 'gradle.lib.dir', Paths.get(project.class.location.toURI()).parent.toAbsolutePath().toString().replace('\\', '/')
|
||||
systemProperty 'gradle.worker.jar', Paths.get("${gradleUserHome}/caches/${gradle.gradleVersion}/workerMain/gradle-worker.jar").toAbsolutePath().toString()
|
||||
systemProperty 'gradle.user.home', gradleUserHome.toPath().toAbsolutePath().toString()
|
||||
}
|
||||
|
||||
doFirst {
|
||||
logger.debug("Will use test opts:\n" + testOptionsResolved.collect {k,v -> "${k}: ${v}"}.sort().join("\n"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add a helper task to display resolved test property values with their defaults
|
||||
// and descriptions.
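// Hedged usage sketch (the project path is illustrative):
//   gradlew :lucene:core:testOpts -Ptests.seed=DEADBEEF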
|
||||
allprojects {
|
||||
plugins.withType(JavaPlugin) {
|
||||
task testOpts() {
|
||||
group = 'Help (developer guides and hints)'
|
||||
description = "Display values of randomization settings for a given seed"
|
||||
|
||||
doFirst {
|
||||
println "Test options for project ${project.path} and seed \"${rootSeed}\":"
|
||||
|
||||
testOptions.sort { a, b -> a.propName.compareTo(b.propName) }.each { opt ->
|
||||
def defValue = Objects.toString(opt.value, null)
|
||||
def value = testOptionsResolved[opt.propName]
|
||||
println String.format(Locale.ROOT,
|
||||
"%s%-23s = %-8s # %s",
|
||||
(defValue != value ? "! " : " "),
|
||||
opt.propName,
|
||||
value,
|
||||
(defValue != value ? "(!= default: ${defValue}) " : "") + opt.description)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Disable assertions for HashMap due to: LUCENE-8991 / JDK-8205399
|
||||
def vmName = System.getProperty("java.vm.name")
|
||||
def spec = System.getProperty("java.specification.version")
|
||||
if (vmName =~ /(?i)(hotspot|openjdk|jrockit)/ &&
|
||||
spec =~ /^(1\.8|9|10|11)$/ &&
|
||||
!Boolean.parseBoolean(propertyOrDefault('tests.asserts.hashmap', 'false'))) {
|
||||
logger.debug("Enabling HashMap assertions.")
|
||||
allprojects {
|
||||
plugins.withType(JavaPlugin) {
|
||||
test {
|
||||
jvmArgs("-da:java.util.HashMap")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
// This adds support for compiling and testing against a different Java runtime.
|
||||
// This is the only way to build against JVMs not yet supported by Gradle itself.
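// Hedged usage sketch (the JDK path is hypothetical): point tests at another runtime with either
//   RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14 ./gradlew test
// or the equivalent -Pruntime.java.home=/usr/lib/jvm/jdk-14 property.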
|
||||
|
||||
import org.gradle.internal.jvm.Jvm
|
||||
|
||||
def jvmForTests = {
|
||||
def runtimeJavaHome = propertyOrDefault("runtime.java.home", System.getenv('RUNTIME_JAVA_HOME'))
|
||||
if (!runtimeJavaHome) {
|
||||
return Jvm.current()
|
||||
} else {
|
||||
return Jvm.forHome(file(runtimeJavaHome))
|
||||
}
|
||||
}()
|
||||
def jvmGradle = Jvm.current()
|
||||
|
||||
def differentTestJvm = (jvmGradle.javaHome.canonicalPath != jvmForTests.javaHome.canonicalPath)
|
||||
|
||||
// Set up tasks to use the alternative Java.
|
||||
if (differentTestJvm) {
|
||||
configure(rootProject) {
|
||||
task testJvmWarning() {
|
||||
doFirst {
|
||||
logger.warn("This Java will be used for running tests: ${jvmForTests.javaExecutable}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set up test tasks to use the alternative JVM.
|
||||
allprojects {
|
||||
tasks.withType(Test) {
|
||||
dependsOn ":testJvmWarning"
|
||||
executable = jvmForTests.javaExecutable
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
// Add test duration summary at the end of the build.
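// Illustration only (the numbers and names are made up): entries printed at the end look like
//    2.31s TestDemo.testSomething (:lucene:core)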
|
||||
|
||||
def allTests = []
|
||||
|
||||
allprojects {
|
||||
tasks.withType(Test) { task ->
|
||||
afterTest { desc, result ->
|
||||
def duration = (result.getEndTime() - result.getStartTime())
|
||||
|
||||
allTests << [
|
||||
name : "${desc.className.replaceAll('.+\\.', "")}.${desc.name} (${project.path})",
|
||||
duration: duration
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gradle.buildFinished { result ->
|
||||
if (allTests && result.getFailure() == null) {
|
||||
def slowest = allTests
|
||||
.sort { a, b -> b.duration.compareTo(a.duration) }
|
||||
.take(10)
|
||||
.findAll { e -> e.duration >= 500 }
|
||||
.collect { e -> String.format(Locale.ROOT, "%5.2fs %s", e.duration / 1000d, e.name) }
|
||||
|
||||
if (slowest) {
|
||||
logger.lifecycle("The slowest tests (exceeding 500 ms) during this run:\n " +
|
||||
slowest.join("\n "))
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
// This checks that we're running the desired version of Gradle and
|
||||
// that the JVM is supported.
|
||||
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
configure(rootProject) {
|
||||
ext {
|
||||
expectedGradleVersion = '6.0.1'
|
||||
minJavaVersion = JavaVersion.VERSION_11
|
||||
}
|
||||
|
||||
wrapper {
|
||||
distributionType = Wrapper.DistributionType.ALL
|
||||
gradleVersion = expectedGradleVersion
|
||||
}
|
||||
|
||||
def currentJavaVersion = JavaVersion.current()
|
||||
if (currentJavaVersion < minJavaVersion) {
|
||||
throw new GradleException("At least Java ${minJavaVersion} is required, you are running Java ${currentJavaVersion} "
|
||||
+ "[${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]")
|
||||
}
|
||||
|
||||
// If we're regenerating the wrapper, skip the check.
|
||||
if (!gradle.startParameter.taskNames.contains("wrapper")) {
|
||||
def currentGradleVersion = GradleVersion.current()
|
||||
if (currentGradleVersion != GradleVersion.version(expectedGradleVersion)) {
|
||||
throw new GradleException("Gradle ${expectedGradleVersion} is required (hint: use the gradlew script): this gradle is ${currentGradleVersion}")
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,42 @@
|
|||
// An equivalent of ant's "check-example-lucene-match-version"
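// Illustration only (the version shown is not authoritative): each solrconfig.xml is expected
// to contain a matching element such as
//   <luceneMatchVersion>9.0.0</luceneMatchVersion>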
|
||||
|
||||
configure(project(":solr")) {
|
||||
task validateConfigFileSanity() {
|
||||
doFirst {
|
||||
def matchVersion = project(":solr:core").testOptionsResolved['tests.luceneMatchVersion']
|
||||
if (!matchVersion) {
|
||||
throw new GradleException("tests.luceneMatchVersion not defined?")
|
||||
}
|
||||
|
||||
// Verify solrconfig.xml files declare proper luceneMatchVersion.
|
||||
[
|
||||
file("server/solr/configsets"),
|
||||
file("example")
|
||||
].each { configsetsDir ->
|
||||
def configFiles = fileTree(configsetsDir, {
|
||||
include "**/solrconfig.xml"
|
||||
})
|
||||
|
||||
configFiles.each { file ->
|
||||
def content = file.getText("UTF-8")
|
||||
if (!content.contains("<luceneMatchVersion>${matchVersion}<")) {
|
||||
throw new GradleException("Configset does not refer to the correct luceneMatchVersion (${matchVersion}): ${file.absolutePath}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def configsetsDir = file("server/solr/configsets")
|
||||
def subdirs = configsetsDir.listFiles({file -> file.isDirectory() } as FileFilter)
|
||||
|
||||
if (subdirs.size() == 0) {
|
||||
throw new GradleException("No sub-directories found under ${configsetsDir}?")
|
||||
}
|
||||
|
||||
subdirs.each { dir ->
|
||||
if (fileTree(dir, { include "**/solrconfig.xml" }).asList().isEmpty()) {
|
||||
throw new GradleException("No solrconfig.xml file under: ${dir}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,116 @@
|
|||
// This configures application of forbidden API rules
|
||||
// via https://github.com/policeman-tools/forbidden-apis
|
||||
|
||||
// Only apply forbidden-apis to java projects.
|
||||
allprojects { prj ->
|
||||
plugins.withId("java", {
|
||||
prj.apply plugin: 'de.thetaphi.forbiddenapis'
|
||||
|
||||
// This helper method appends signature files based on the set of resolved
|
||||
// dependencies from a given configuration.
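// Illustration only (the artifact is hypothetical): for a dependency like commons-io:commons-io
// and suffix "lucene", this looks for signature files named
//   commons-io.commons-io.all.txt and commons-io.commons-io.lucene.txt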
|
||||
def dynamicSignatures = { configuration, suffix ->
|
||||
def deps = configuration.resolvedConfiguration.resolvedArtifacts
|
||||
.collect { a -> a.moduleVersion.id }
|
||||
.collect { id -> [
|
||||
"${id.group}.${id.name}.all.txt",
|
||||
"${id.group}.${id.name}.${suffix}.txt",
|
||||
]}
|
||||
.flatten()
|
||||
.sort()
|
||||
|
||||
deps += ["defaults.all.txt", "defaults.${suffix}.txt"]
|
||||
|
||||
deps.each { sig ->
|
||||
def signaturesFile = rootProject.file("gradle/validation/forbidden-apis/${sig}")
|
||||
if (signaturesFile.exists()) {
|
||||
logger.info("Signature file applied: ${sig}")
|
||||
signaturesFiles += files(signaturesFile)
|
||||
} else {
|
||||
logger.debug("Signature file omitted (does not exist): ${sig}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Configure defaults for sourceSets.main
|
||||
forbiddenApisMain {
|
||||
bundledSignatures += [
|
||||
'jdk-unsafe',
|
||||
'jdk-deprecated',
|
||||
'jdk-non-portable',
|
||||
'jdk-reflection',
|
||||
'jdk-system-out',
|
||||
]
|
||||
|
||||
suppressAnnotations += [
|
||||
"**.SuppressForbidden"
|
||||
]
|
||||
}
|
||||
|
||||
// Configure defaults for sourceSets.test
|
||||
forbiddenApisTest {
|
||||
bundledSignatures += [
|
||||
'jdk-unsafe',
|
||||
'jdk-deprecated',
|
||||
'jdk-non-portable',
|
||||
'jdk-reflection',
|
||||
]
|
||||
|
||||
signaturesFiles = files(
|
||||
rootProject.file("gradle/validation/forbidden-apis/defaults.tests.txt")
|
||||
)
|
||||
|
||||
suppressAnnotations += [
|
||||
"**.SuppressForbidden"
|
||||
]
|
||||
}
|
||||
|
||||
// Attach validation to check task.
|
||||
check.dependsOn forbiddenApisMain, forbiddenApisTest
|
||||
|
||||
// Disable sysout signatures for these projects.
|
||||
if (prj.path in [
|
||||
":lucene:demo",
|
||||
":lucene:benchmark",
|
||||
":lucene:test-framework",
|
||||
":solr:solr-ref-guide",
|
||||
":solr:test-framework"
|
||||
]) {
|
||||
forbiddenApisMain.bundledSignatures -= [
|
||||
'jdk-system-out'
|
||||
]
|
||||
}
|
||||
|
||||
// Configure lucene-specific rules.
|
||||
if (prj.path.startsWith(":lucene")) {
|
||||
forbiddenApisMain {
|
||||
doFirst dynamicSignatures.curry(configurations.compileClasspath, "lucene")
|
||||
}
|
||||
|
||||
forbiddenApisTest {
|
||||
doFirst dynamicSignatures.curry(configurations.testCompileClasspath, "lucene")
|
||||
}
|
||||
}
|
||||
|
||||
// Configure solr-specific rules.
|
||||
if (prj.path.startsWith(":solr")) {
|
||||
forbiddenApisMain {
|
||||
doFirst dynamicSignatures.curry(configurations.compileClasspath, "solr")
|
||||
}
|
||||
|
||||
forbiddenApisTest {
|
||||
doFirst dynamicSignatures.curry(configurations.testCompileClasspath, "solr")
|
||||
}
|
||||
}
|
||||
|
||||
// We rely on resolved configurations to compute the relevant set of rule
|
||||
// files for forbiddenApis. Since we don't want to resolve these configurations until
|
||||
// the task is executed, we can't really use them as task inputs properly. This is a
|
||||
// chicken-and-egg problem.
|
||||
//
|
||||
// This is the simplest workaround possible: just point at all the rule files and declare
|
||||
// them as inputs. This way if a rule is modified, checks will be reapplied.
|
||||
configure([forbiddenApisMain, forbiddenApisTest]) { task ->
|
||||
task.inputs.dir(rootProject.file("gradle/validation/forbidden-apis"))
|
||||
}
|
||||
})
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
com.carrotsearch.randomizedtesting.annotations.Seed @ Don't commit hardcoded seeds
|
|
@ -0,0 +1,2 @@
|
|||
@defaultMessage Use org.apache.solr.common.annotation.JsonProperty instead
|
||||
com.fasterxml.jackson.annotation.JsonProperty
|
|
@ -0,0 +1,17 @@
|
|||
@defaultMessage Use corresponding Java 8 functional/streaming interfaces
|
||||
com.google.common.base.Function
|
||||
com.google.common.base.Joiner
|
||||
com.google.common.base.Predicate
|
||||
com.google.common.base.Supplier
|
||||
|
||||
@defaultMessage Use java.nio.charset.StandardCharsets instead
|
||||
com.google.common.base.Charsets
|
||||
|
||||
@defaultMessage Use methods in java.util.Objects instead
|
||||
com.google.common.base.Objects#equal(java.lang.Object,java.lang.Object)
|
||||
com.google.common.base.Objects#hashCode(java.lang.Object[])
|
||||
com.google.common.base.Preconditions#checkNotNull(java.lang.Object)
|
||||
com.google.common.base.Preconditions#checkNotNull(java.lang.Object,java.lang.Object)
|
||||
|
||||
@defaultMessage Use methods in java.util.Comparator instead
|
||||
com.google.common.collect.Ordering
|
|
@ -0,0 +1,2 @@
|
|||
@defaultMessage Use java.nio.charset.StandardCharsets instead
|
||||
org.apache.commons.codec.Charsets
|
|
@ -0,0 +1,64 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
@defaultMessage Spawns threads with vague names; use a custom thread factory (Lucene's NamedThreadFactory, Solr's DefaultSolrThreadFactory) and name threads so that you can tell (by its name) which executor it is associated with
|
||||
java.util.concurrent.Executors#newFixedThreadPool(int)
|
||||
java.util.concurrent.Executors#newSingleThreadExecutor()
|
||||
java.util.concurrent.Executors#newCachedThreadPool()
|
||||
java.util.concurrent.Executors#newSingleThreadScheduledExecutor()
|
||||
java.util.concurrent.Executors#newScheduledThreadPool(int)
|
||||
java.util.concurrent.Executors#defaultThreadFactory()
|
||||
java.util.concurrent.Executors#privilegedThreadFactory()
|
||||
|
||||
@defaultMessage Properties files should be read/written with Reader/Writer, using UTF-8 charset. This allows reading older files with unicode escapes, too.
|
||||
java.util.Properties#load(java.io.InputStream)
|
||||
java.util.Properties#save(java.io.OutputStream,java.lang.String)
|
||||
java.util.Properties#store(java.io.OutputStream,java.lang.String)
|
||||
|
||||
@defaultMessage The context classloader should never be used for resource lookups, unless there is a 3rd party library that needs it. Always pass a classloader down as method parameters.
|
||||
java.lang.Thread#getContextClassLoader()
|
||||
java.lang.Thread#setContextClassLoader(java.lang.ClassLoader)
|
||||
|
||||
java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars
|
||||
java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
|
||||
|
||||
java.io.File#delete() @ use Files.delete for real exception, IOUtils.deleteFilesIgnoringExceptions if you don't care
|
||||
|
||||
java.util.Collections#shuffle(java.util.List) @ Use shuffle(List, Random) instead so that it can be reproduced
|
||||
|
||||
java.util.Locale#forLanguageTag(java.lang.String) @ use new Locale.Builder().setLanguageTag(...).build() which has error handling
|
||||
java.util.Locale#toString() @ use Locale#toLanguageTag() for a standardized BCP47 locale name
|
||||
|
||||
@defaultMessage Constructors for wrapper classes of Java primitives should be avoided in favor of the public static methods available or autoboxing
|
||||
java.lang.Integer#<init>(int)
|
||||
java.lang.Integer#<init>(java.lang.String)
|
||||
java.lang.Byte#<init>(byte)
|
||||
java.lang.Byte#<init>(java.lang.String)
|
||||
java.lang.Short#<init>(short)
|
||||
java.lang.Short#<init>(java.lang.String)
|
||||
java.lang.Long#<init>(long)
|
||||
java.lang.Long#<init>(java.lang.String)
|
||||
java.lang.Boolean#<init>(boolean)
|
||||
java.lang.Boolean#<init>(java.lang.String)
|
||||
java.lang.Character#<init>(char)
|
||||
java.lang.Float#<init>(float)
|
||||
java.lang.Float#<init>(double)
|
||||
java.lang.Float#<init>(java.lang.String)
|
||||
java.lang.Double#<init>(double)
|
||||
java.lang.Double#<init>(java.lang.String)
|
||||
|
||||
@defaultMessage Java deserialization is unsafe when the data is untrusted. The java developer is powerless: no checks or casts help, exploitation can happen in places such as clinit or finalize!
|
||||
java.io.ObjectInputStream
|
||||
java.io.ObjectOutputStream
|
|
@ -0,0 +1,49 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
@defaultMessage Use NIO.2 instead
|
||||
java.io.File
|
||||
java.io.FileInputStream
|
||||
java.io.FileOutputStream
|
||||
java.io.PrintStream#<init>(java.lang.String,java.lang.String)
|
||||
java.io.PrintWriter#<init>(java.lang.String,java.lang.String)
|
||||
java.util.Formatter#<init>(java.lang.String,java.lang.String,java.util.Locale)
|
||||
java.io.RandomAccessFile
|
||||
java.nio.file.Path#toFile()
|
||||
java.util.jar.JarFile
|
||||
java.util.zip.ZipFile
|
||||
@defaultMessage Prefer using ArrayUtil as Arrays#copyOfRange fills zeros for bad bounds
|
||||
java.util.Arrays#copyOfRange(byte[],int,int)
|
||||
java.util.Arrays#copyOfRange(char[],int,int)
|
||||
java.util.Arrays#copyOfRange(short[],int,int)
|
||||
java.util.Arrays#copyOfRange(int[],int,int)
|
||||
java.util.Arrays#copyOfRange(long[],int,int)
|
||||
java.util.Arrays#copyOfRange(float[],int,int)
|
||||
java.util.Arrays#copyOfRange(double[],int,int)
|
||||
java.util.Arrays#copyOfRange(boolean[],int,int)
|
||||
java.util.Arrays#copyOfRange(java.lang.Object[],int,int)
|
||||
java.util.Arrays#copyOfRange(java.lang.Object[],int,int,java.lang.Class)
|
||||
|
||||
@defaultMessage Prefer using ArrayUtil as Arrays#copyOf fills zeros for bad bounds
|
||||
java.util.Arrays#copyOf(byte[],int)
|
||||
java.util.Arrays#copyOf(char[],int)
|
||||
java.util.Arrays#copyOf(short[],int)
|
||||
java.util.Arrays#copyOf(int[],int)
|
||||
java.util.Arrays#copyOf(long[],int)
|
||||
java.util.Arrays#copyOf(float[],int)
|
||||
java.util.Arrays#copyOf(double[],int)
|
||||
java.util.Arrays#copyOf(boolean[],int)
|
||||
java.util.Arrays#copyOf(java.lang.Object[],int)
|
||||
java.util.Arrays#copyOf(java.lang.Object[],int,java.lang.Class)
|
|
@ -0,0 +1,35 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
@defaultMessage Spawns threads without MDC logging context; use ExecutorUtil.newMDCAwareFixedThreadPool instead
|
||||
java.util.concurrent.Executors#newFixedThreadPool(int,java.util.concurrent.ThreadFactory)
|
||||
|
||||
@defaultMessage Spawns threads without MDC logging context; use ExecutorUtil.newMDCAwareSingleThreadExecutor instead
|
||||
java.util.concurrent.Executors#newSingleThreadExecutor(java.util.concurrent.ThreadFactory)
|
||||
|
||||
@defaultMessage Spawns threads without MDC logging context; use ExecutorUtil.newMDCAwareCachedThreadPool instead
|
||||
java.util.concurrent.Executors#newCachedThreadPool(java.util.concurrent.ThreadFactory)
|
||||
|
||||
@defaultMessage Use ExecutorUtil.MDCAwareThreadPoolExecutor instead of ThreadPoolExecutor
|
||||
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue,java.util.concurrent.ThreadFactory,java.util.concurrent.RejectedExecutionHandler)
|
||||
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue)
|
||||
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue,java.util.concurrent.ThreadFactory)
|
||||
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue,java.util.concurrent.RejectedExecutionHandler)
|
||||
|
||||
@defaultMessage Use RTimer/TimeOut/System.nanoTime for time comparisons, and `new Date()` for output/debugging/stats of timestamps. If for some miscellaneous reason you absolutely need to use this, use a SuppressForbidden.
|
||||
java.lang.System#currentTimeMillis()
|
||||
|
||||
@defaultMessage Use slf4j classes instead
|
||||
java.util.logging.**
|
|
@ -0,0 +1,25 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
java.util.Random#<init>() @ Use RandomizedRunner's random() instead
|
||||
java.lang.Math#random() @ Use RandomizedRunner's random().nextDouble() instead
|
||||
|
||||
# TODO: fix tests that do this!
|
||||
#java.lang.System#currentTimeMillis() @ Don't depend on wall clock times
|
||||
#java.lang.System#nanoTime() @ Don't depend on wall clock times
|
||||
|
||||
@defaultMessage Use LuceneTestCase.collate instead, which can avoid JDK-8071862
|
||||
java.text.Collator#compare(java.lang.Object,java.lang.Object)
|
||||
java.text.Collator#compare(java.lang.String,java.lang.String)
|
|
@ -0,0 +1,43 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
@defaultMessage Servlet API method is parsing request parameters without using the correct encoding if no extra configuration is given in the servlet container
|
||||
|
||||
javax.servlet.ServletRequest#getParameter(java.lang.String)
|
||||
javax.servlet.ServletRequest#getParameterMap()
|
||||
javax.servlet.ServletRequest#getParameterNames()
|
||||
javax.servlet.ServletRequest#getParameterValues(java.lang.String)
|
||||
|
||||
javax.servlet.http.HttpServletRequest#getSession() @ Servlet API getter has side effect of creating sessions
|
||||
|
||||
@defaultMessage Servlet API method is broken and slow in some environments (e.g., Jetty's UTF-8 readers)
|
||||
|
||||
javax.servlet.ServletRequest#getReader()
|
||||
javax.servlet.ServletResponse#getWriter()
|
||||
javax.servlet.ServletInputStream#readLine(byte[],int,int)
|
||||
javax.servlet.ServletOutputStream#print(boolean)
|
||||
javax.servlet.ServletOutputStream#print(char)
|
||||
javax.servlet.ServletOutputStream#print(double)
|
||||
javax.servlet.ServletOutputStream#print(float)
|
||||
javax.servlet.ServletOutputStream#print(int)
|
||||
javax.servlet.ServletOutputStream#print(long)
|
||||
javax.servlet.ServletOutputStream#print(java.lang.String)
|
||||
javax.servlet.ServletOutputStream#println(boolean)
|
||||
javax.servlet.ServletOutputStream#println(char)
|
||||
javax.servlet.ServletOutputStream#println(double)
|
||||
javax.servlet.ServletOutputStream#println(float)
|
||||
javax.servlet.ServletOutputStream#println(int)
|
||||
javax.servlet.ServletOutputStream#println(long)
|
||||
javax.servlet.ServletOutputStream#println(java.lang.String)
|
|
@ -0,0 +1 @@
|
|||
junit.framework.TestCase @ All classes should derive from LuceneTestCase
|
|
@ -0,0 +1,3 @@
|
|||
@defaultMessage Use slf4j classes instead
|
||||
org.apache.log4j.**
|
||||
org.apache.logging.log4j.**
|
|
@ -0,0 +1,52 @@
|
|||
// This adds top-level 'precommit' task with essential
|
||||
// precommit validation checks.
|
||||
|
||||
import org.eclipse.jgit.api.*;
|
||||
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
|
||||
import org.eclipse.jgit.errors.*;
|
||||
|
||||
|
||||
buildscript {
|
||||
repositories {
|
||||
mavenCentral()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
classpath 'org.eclipse.jgit:org.eclipse.jgit:5.3.0.201903130848-r'
|
||||
classpath 'commons-codec:commons-codec:1.6'
|
||||
}
|
||||
}
|
||||
|
||||
configure(rootProject) {
|
||||
// Verify git working copy does not have any modified files.
|
||||
task checkWorkingCopyPristine() {
|
||||
doFirst {
|
||||
def gitDir = rootProject.projectDir
|
||||
try {
|
||||
def repository = new FileRepositoryBuilder()
|
||||
.setWorkTree(gitDir)
|
||||
.setMustExist(true)
|
||||
.build()
|
||||
|
||||
def status = new Git(repository).status().call()
|
||||
if (!status.clean) {
|
||||
def offenders = [
|
||||
"(added)": status.added,
|
||||
"(changed)": status.changed,
|
||||
"(conflicting)": status.conflicting,
|
||||
"(missing)": status.missing,
|
||||
"(modified)": status.modified,
|
||||
"(removed)": status.removed,
|
||||
"(untracked)": [status.untracked, status.untrackedFolders].flatten()
|
||||
].collectMany { fileStatus, files ->
|
||||
files.collect {file -> " - ${file} ${fileStatus}" }
|
||||
}.sort()
|
||||
|
||||
throw new GradleException("Working copy is not a clean git checkout, offending files:\n${offenders.join("\n")}")
|
||||
}
|
||||
} catch (RepositoryNotFoundException | NoWorkTreeException | NotSupportedException e) {
|
||||
logger.warn("WARNING: Directory is not a valid GIT checkout (won't check dirty files): ${gitDir}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,377 @@
|
|||
// This adds validation of project dependencies:
|
||||
// 1) license file
|
||||
// 2) notice file
|
||||
// 3) checksum validation/ generation.
|
||||
|
||||
import org.apache.commons.codec.digest.DigestUtils
|
||||
import org.apache.commons.codec.digest.MessageDigestAlgorithms
|
||||
|
||||
// This should be false only for debugging.
|
||||
def failOnError = true
|
||||
|
||||
// We're using commons-codec for computing checksums.
|
||||
buildscript {
|
||||
repositories {
|
||||
mavenCentral()
|
||||
}
|
||||
|
||||
dependencies {
|
||||
classpath 'commons-codec:commons-codec:1.13'
|
||||
}
|
||||
}
|
||||
|
||||
// Configure license checksum folder for top-level projects.
|
||||
// (The file("licenses") inside the configure scope resolves
|
||||
// relative to the current project so they're not the same).
|
||||
configure(project(":lucene")) {
|
||||
ext.licensesDir = file("licenses")
|
||||
}
|
||||
configure(project(":solr")) {
|
||||
ext.licensesDir = file("licenses")
|
||||
}
|
||||
|
||||
// All known license types. If 'noticeOptional' is true then
|
||||
// the notice file must accompany the license.
|
||||
def licenseTypes = [
|
||||
"ASL" : [name: "Apache Software License 2.0"],
|
||||
"BSD" : [name: "Berkeley Software Distribution"],
|
||||
//BSD like just means someone has taken the BSD license and put in their name, copyright, or it's a very similar license.
|
||||
"BSD_LIKE": [name: "BSD like license"],
|
||||
"CDDL" : [name: "Common Development and Distribution License", noticeOptional: true],
|
||||
"CPL" : [name: "Common Public License"],
|
||||
"EPL" : [name: "Eclipse Public License Version 1.0", noticeOptional: true],
|
||||
"MIT" : [name: "Massachusetts Institute of Tech. License", noticeOptional: true],
|
||||
"MPL" : [name: "Mozilla Public License", noticeOptional: true /* NOT SURE on the required notice */],
|
||||
"PD" : [name: "Public Domain", noticeOptional: true],
|
||||
"SUN" : [name: "Sun Open Source License", noticeOptional: true],
|
||||
"COMPOUND": [name: "Compound license (details in NOTICE file)."],
|
||||
]
|
||||
|
||||
allprojects {
|
||||
task licenses() {
|
||||
group = 'Dependency validation'
|
||||
description = "Apply all dependency/ license checks."
|
||||
}
|
||||
check.dependsOn(licenses)
|
||||
}
|
||||
|
||||
subprojects {
|
||||
// Configure jarValidation configuration for all projects. Any dependency
|
||||
// declared on this configuration (or any configuration it extends from) will
|
||||
// be verified.
|
||||
configurations {
|
||||
jarValidation
|
||||
}
|
||||
|
||||
// For Java projects, add all dependencies from the following configurations
|
||||
// to jar validation
|
||||
plugins.withType(JavaPlugin) {
|
||||
configurations {
|
||||
jarValidation {
|
||||
extendsFrom runtimeClasspath
|
||||
extendsFrom compileClasspath
|
||||
extendsFrom testRuntimeClasspath
|
||||
extendsFrom testCompileClasspath
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Collects dependency JAR information for a project and saves it in
|
||||
// project.ext.jarInfos. Each dependency has a map of attributes
|
||||
// which make it easier to process it later on (name, hash, origin module,
|
||||
// see the code below for details).
|
||||
task collectJarInfos() {
|
||||
dependsOn configurations.jarValidation
|
||||
|
||||
doFirst {
|
||||
def isSolr = project.path.startsWith(":solr")
|
||||
|
||||
// When gradle resolves a configuration it applies exclude rules from inherited configurations
|
||||
// globally (this seems like a bug to me). So we process each inherited configuration independently
|
||||
// but make sure there are no other dependencies on jarValidation itself.
|
||||
if (!configurations.jarValidation.dependencies.isEmpty()) {
|
||||
throw new GradleException("jarValidation must only inherit from other configurations (can't have its own dependencies).")
|
||||
}
|
||||
|
||||
def excludeRules = configurations.jarValidation.excludeRules
|
||||
|
||||
ArrayDeque<ResolvedDependency> queue = new ArrayDeque<>()
|
||||
configurations.jarValidation.extendsFrom.each { conf ->
|
||||
if (excludeRules) {
|
||||
conf = configurations.detachedConfiguration().extendsFrom(conf)
|
||||
conf.excludeRules = excludeRules
|
||||
}
|
||||
if (conf.canBeResolved) {
|
||||
queue.addAll(conf.resolvedConfiguration.firstLevelModuleDependencies)
|
||||
}
|
||||
}
|
||||
|
||||
def visited = new HashSet<>()
|
||||
def infos = []
|
||||
|
||||
while (!queue.isEmpty()) {
|
||||
def dep = queue.removeFirst()
|
||||
|
||||
// Skip any artifacts from other Solr modules (they will be resolved there).
|
||||
if (dep.moduleGroup == "org.apache.solr") {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip any artifacts from Lucene modules.
|
||||
if (dep.moduleGroup.startsWith("org.apache.lucene")) {
|
||||
// ... but process their transitive dependencies for Solr compatibility.
|
||||
if (isSolr) {
|
||||
queue.addAll(dep.children)
|
||||
}
|
||||
} else {
|
||||
queue.addAll(dep.children)
|
||||
dep.moduleArtifacts.each { resolvedArtifact ->
|
||||
def file = resolvedArtifact.file
|
||||
if (visited.add(file)) {
|
||||
infos.add([
|
||||
name : resolvedArtifact.name,
|
||||
jarName : file.toPath().getFileName().toString(),
|
||||
path : file,
|
||||
module : resolvedArtifact.moduleVersion,
|
||||
checksum : provider { new DigestUtils(MessageDigestAlgorithms.SHA_1).digestAsHex(file).trim() },
|
||||
// We keep track of the files referenced by this dependency (sha, license, notice, etc.)
|
||||
// so that we can determine unused dangling files later on.
|
||||
referencedFiles: []
|
||||
])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
project.ext.jarInfos = infos.sort {a, b -> "${a.module}".compareTo("${b.module}")}
|
||||
// jarInfos.each { info -> println "${info.module}" }
|
||||
}
|
||||
}
|
||||
|
||||
// Verifies that each JAR has a corresponding checksum and that it matches actual JAR available for this dependency.
|
||||
task validateJarChecksums() {
|
||||
group = 'Dependency validation'
|
||||
description = "Validate checksums of dependencies"
|
||||
dependsOn collectJarInfos
|
||||
|
||||
doLast {
|
||||
def errors = []
|
||||
jarInfos.each { dep ->
|
||||
def expectedChecksumFile = file("${licensesDir}/${dep.jarName}.sha1")
|
||||
if (!expectedChecksumFile.exists()) {
|
||||
errors << "Dependency checksum missing ('${dep.module}'), expected it at: ${expectedChecksumFile}"
|
||||
} else {
|
||||
dep.referencedFiles += expectedChecksumFile
|
||||
def expected = expectedChecksumFile.getText("UTF-8").trim()
|
||||
def actual = dep.checksum.get()
|
||||
if (expected.compareToIgnoreCase(actual) != 0) {
|
||||
errors << "Dependency checksum mismatch ('${dep.module}'), expected it to be: ${expected}, but was: ${actual}"
|
||||
} else {
|
||||
logger.log(LogLevel.INFO, "Dependency checksum OK ('${dep.module}')")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (errors) {
|
||||
def msg = "Dependency checksum validation failed:\n - " + errors.join("\n - ")
|
||||
if (failOnError) {
|
||||
throw new GradleException(msg)
|
||||
} else {
|
||||
logger.log(LogLevel.WARN, "WARNING: ${msg}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Locate the set of license file candidates for this dependency. We
|
||||
// search for [jar-or-prefix]-LICENSE-[type].txt
|
||||
// where 'jar-or-prefix' can be any '-'-delimited prefix of the dependency JAR's name.
|
||||
// So for 'commons-io' it can be 'commons-io-LICENSE-foo.txt' or
|
||||
// 'commons-LICENSE.txt'
|
||||
task validateJarLicenses() {
|
||||
group = 'Dependency validation'
|
||||
description = "Validate license and notice files of dependencies"
|
||||
dependsOn collectJarInfos
|
||||
|
||||
doLast {
|
||||
def errors = []
|
||||
jarInfos.each { dep ->
|
||||
def baseName = dep.name
|
||||
def found = []
|
||||
def candidates = []
|
||||
while (true) {
|
||||
candidates += file("${licensesDir}/${baseName}-LICENSE-[type].txt")
|
||||
found += fileTree(dir: licensesDir, include: "${baseName}-LICENSE-*.txt").files
|
||||
def prefix = baseName.replaceAll(/[\-][^-]+$/, "")
|
||||
if (found || prefix == baseName) {
|
||||
break
|
||||
}
|
||||
baseName = prefix
|
||||
}
|
||||
|
||||
if (found.size() == 0) {
|
||||
errors << "License file missing ('${dep.module}'), expected it at: ${candidates.join(" or ")}," +
|
||||
" where [type] can be any of ${licenseTypes.keySet()}."
|
||||
} else if (found.size() > 1) {
|
||||
errors << "Multiple license files matching for ('${dep.module}'): ${found.join(", ")}"
|
||||
} else {
|
||||
def licenseFile = found.get(0)
|
||||
dep.referencedFiles += licenseFile
|
||||
def m = (licenseFile.name =~ /LICENSE-(.+)\.txt$/)
|
||||
if (!m) throw new GradleException("License file name doesn't contain license type?: ${licenseFile.name}")
|
||||
|
||||
def licenseName = m[0][1]
|
||||
def licenseType = licenseTypes[licenseName]
|
||||
if (!licenseType) {
|
||||
errors << "Unknown license type suffix for ('${dep.module}'): ${licenseFile} (must be one of ${licenseTypes.keySet()})"
|
||||
} else {
|
||||
logger.log(LogLevel.INFO, "Dependency license file OK ('${dep.module}'): " + licenseName)
|
||||
|
||||
// Look for sibling NOTICE file.
|
||||
def noticeFile = file(licenseFile.path.replaceAll(/\-LICENSE-.+/, "-NOTICE.txt"))
|
||||
if (noticeFile.exists()) {
|
||||
dep.referencedFiles += noticeFile
|
||||
logger.log(LogLevel.INFO, "Dependency notice file OK ('${dep.module}'): " + noticeFile)
|
||||
} else if (!licenseType.noticeOptional) {
|
||||
errors << "Notice file missing for ('${dep.module}'), expected it at: ${noticeFile}"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (errors) {
|
||||
def msg = "Certain license/ notice files are missing:\n - " + errors.join("\n - ")
|
||||
if (failOnError) {
|
||||
throw new GradleException(msg)
|
||||
} else {
|
||||
logger.log(LogLevel.WARN, "WARNING: ${msg}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
licenses.dependsOn validateJarChecksums, validateJarLicenses
|
||||
}
|
||||
|
||||
// Add top-project level tasks validating dangling files
|
||||
// and regenerating dependency checksums.
|
||||
configure([project(":solr"), project(":lucene"),]) {
|
||||
def validationTasks = subprojects.collectMany { it.tasks.matching { it.name == "licenses" } }
|
||||
def jarInfoTasks = subprojects.collectMany { it.tasks.matching { it.name == "collectJarInfos" } }
|
||||
|
||||
// Update dependency checksums.
|
||||
task updateLicenses() {
|
||||
group = 'Dependency validation'
|
||||
description = "Write or update checksums of dependencies"
|
||||
dependsOn jarInfoTasks
|
||||
|
||||
doLast {
|
||||
licensesDir.mkdirs()
|
||||
|
||||
// Clean any previous checksums. In theory we wouldn't have to do it --
|
||||
// dangling files from any previous JARs would be reported;
|
||||
// it automates the process of updating versions and makes it easier though so
|
||||
// why not.
|
||||
project.delete fileTree(licensesDir, {
|
||||
include "*.sha1"
|
||||
exclude checkDanglingLicenseFiles.ext.exclude
|
||||
})
|
||||
|
||||
def updated = []
|
||||
jarInfoTasks.collectMany { task -> task.project.jarInfos }.each { dep ->
|
||||
def expectedChecksumFile = file("${licensesDir}/${dep.jarName}.sha1")
|
||||
def actual = dep.checksum.get()
|
||||
if (expectedChecksumFile.exists()) {
|
||||
def expected = expectedChecksumFile.getText("UTF-8").trim()
|
||||
if (expected.compareToIgnoreCase(actual) == 0) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
updated += "Updated checksum ('${dep.module}'): ${expectedChecksumFile}"
|
||||
expectedChecksumFile.write(actual + "\n", "UTF-8")
|
||||
}
|
||||
|
||||
updated.sort().each { line -> logger.log(LogLevel.LIFECYCLE, line) }
|
||||
}
|
||||
}
|
||||
|
||||
// Any validation task must run after all updates have been applied.
|
||||
// We add an ordering constraint that any validation task (or its dependency subgraph)
|
||||
// must run after updateLicenses
|
||||
validationTasks
|
||||
.collectMany { task -> [task, task.dependsOn]}
|
||||
.flatten()
|
||||
.each { task ->
|
||||
task.mustRunAfter updateLicenses
|
||||
}
|
||||
|
||||
// Check for dangling files in the licenses folder.
|
||||
task checkDanglingLicenseFiles() {
|
||||
dependsOn validationTasks
|
||||
|
||||
ext {
|
||||
exclude = []
|
||||
}
|
||||
|
||||
doFirst {
|
||||
def allReferenced = validationTasks.collectMany { task ->
|
||||
task.project.jarInfos.collectMany { it.referencedFiles }
|
||||
}.collect { it.toString() }
|
||||
|
||||
def patterns = ext.exclude
|
||||
def allExisting = fileTree(licensesDir, {
|
||||
exclude patterns
|
||||
}).files.collect { it.toString() }
|
||||
|
||||
def dangling = (allExisting - allReferenced).sort()
|
||||
|
||||
if (dangling) {
|
||||
gradle.buildFinished {
|
||||
logger.warn("WARNING: there were unreferenced files under license folder:\n - ${dangling.join("\n - ")}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
licenses.dependsOn checkDanglingLicenseFiles
|
||||
}
|
||||
|
||||
// Exclude files that are not a result of direct dependencies but have to be there.
|
||||
// It would be probably better to move non-dependency licenses into the actual project
|
||||
// where they're used and only assemble them for the distribution package.
|
||||
configure(project(":lucene")) {
|
||||
checkDanglingLicenseFiles {
|
||||
exclude += [
|
||||
"elegant-icon-font-*",
|
||||
"ant-*",
|
||||
"ivy-*",
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr")) {
|
||||
checkDanglingLicenseFiles {
|
||||
exclude += [
|
||||
"README.committers.txt",
|
||||
|
||||
// solr-ref-guide compilation-only dependencies.
|
||||
"android-json-*",
|
||||
"ant-*",
|
||||
"asciidoctor-ant-*",
|
||||
"jsoup-*",
|
||||
"junit4-ant-*",
|
||||
"slf4j-simple-*",
|
||||
"start.jar.sha1"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
// solr-ref-guide doesn't contribute any JARs to dependency checks.
|
||||
configure(project(":solr:solr-ref-guide")) {
|
||||
configurations {
|
||||
jarValidation {
|
||||
exclude group: "*"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
// This adds top-level 'precommit' task.
|
||||
|
||||
configure(rootProject) {
|
||||
task precommit() {
|
||||
group = 'Precommit'
|
||||
description = "All precommit checks"
|
||||
|
||||
// Root-level validation tasks.
|
||||
dependsOn ":verifyLocks"
|
||||
dependsOn ":versionsPropsAreSorted"
|
||||
dependsOn ":checkWorkingCopyPristine"
|
||||
dependsOn ":validateSourcePatterns"
|
||||
|
||||
// Solr validation tasks.
|
||||
dependsOn ":solr:validateConfigFileSanity"
|
||||
|
||||
// Attach all these tasks from all projects that have them.
|
||||
// This uses lazy collections as they may not yet be defined.
|
||||
dependsOn allprojects.collect { prj ->
|
||||
prj.tasks.matching { task -> task.name in [
|
||||
"forbiddenApisMain",
|
||||
"forbiddenApisTest",
|
||||
"licenses",
|
||||
"javadoc",
|
||||
"rat",
|
||||
]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,252 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import groovy.xml.NamespaceBuilder
|
||||
|
||||
configure(rootProject) {
|
||||
configurations {
|
||||
ratDeps
|
||||
}
|
||||
|
||||
dependencies {
|
||||
ratDeps "org.apache.rat:apache-rat:${scriptDepVersions['apache-rat']}"
|
||||
}
|
||||
}
|
||||
|
||||
allprojects {
|
||||
task("rat", type: RatTask) {
|
||||
group = 'Verification'
|
||||
description = 'Runs Apache Rat checks.'
|
||||
}
|
||||
}
|
||||
|
||||
configure(rootProject) {
|
||||
rat {
|
||||
includes += [
|
||||
"buildSrc/**/*.java",
|
||||
"lucene/tools/forbiddenApis/**",
|
||||
"lucene/tools/prettify/**",
|
||||
]
|
||||
excludes += [
|
||||
// Unclear if this needs ASF header, depends on how much was copied from ElasticSearch
|
||||
"**/ErrorReportingTestListener.java"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:analysis:common")) {
|
||||
rat {
|
||||
srcExcludes += [
|
||||
"**/*.aff",
|
||||
"**/*.dic",
|
||||
"**/charfilter/*.htm*",
|
||||
"**/*LuceneResourcesWikiPage.html"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:analysis:kuromoji")) {
|
||||
rat {
|
||||
srcExcludes += [
|
||||
// whether rat detects this as binary or not is platform dependent?!
|
||||
"**/bocchan.utf-8"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:analysis:opennlp")) {
|
||||
rat {
|
||||
excludes += [
|
||||
"src/tools/test-model-data/*.txt",
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:highlighter")) {
|
||||
rat {
|
||||
srcExcludes += [
|
||||
"**/CambridgeMA.utf8"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:suggest")) {
|
||||
rat {
|
||||
srcExcludes += [
|
||||
"**/Top50KWiki.utf8",
|
||||
"**/stop-snowball.txt"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:core")) {
|
||||
rat {
|
||||
srcExcludes += [
|
||||
"**/htmlStripReaderTest.html"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:webapp")) {
|
||||
rat {
|
||||
includes = [ "**" ]
|
||||
excludes += [
|
||||
"web/img/**",
|
||||
"*.iml",
|
||||
"build.gradle",
|
||||
"build/**",
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
// Structure inspired by existing task from Apache Kafka, heavily modified since then.
|
||||
class RatTask extends DefaultTask {
|
||||
@Input
|
||||
List<String> includes = [
|
||||
"*.xml",
|
||||
"src/tools/**"
|
||||
]
|
||||
|
||||
@Input
|
||||
List<String> excludes = []
|
||||
|
||||
@Input
|
||||
List<String> srcExcludes = [
|
||||
"**/TODO",
|
||||
"**/*.txt",
|
||||
"**/*.iml",
|
||||
"**/*.gradle",
|
||||
"build/**"
|
||||
]
|
||||
|
||||
@OutputFile
|
||||
def xmlReport = new File(new File(project.buildDir, 'rat'), 'rat-report.xml')
|
||||
|
||||
def generateXmlReport() {
|
||||
def uri = 'antlib:org.apache.rat.anttasks'
|
||||
def ratClasspath = project.rootProject.configurations.ratDeps.asPath
|
||||
ant.taskdef(resource: 'org/apache/rat/anttasks/antlib.xml', uri: uri, classpath: ratClasspath)
|
||||
|
||||
def rat = NamespaceBuilder.newInstance(ant, uri)
|
||||
rat.report(format: 'xml', reportFile: xmlReport, addDefaultLicenseMatchers: true) {
|
||||
ant.fileset(dir: "${project.projectDir}") {
|
||||
includes.each { pattern -> ant.include(name: pattern) }
|
||||
excludes.each { pattern -> ant.exclude(name: pattern) }
|
||||
}
|
||||
|
||||
if (project.plugins.findPlugin(JavaPlugin)) {
|
||||
[
|
||||
project.sourceSets.main.java.srcDirs,
|
||||
project.sourceSets.test.java.srcDirs,
|
||||
].flatten().each { srcLocation ->
|
||||
ant.fileset(dir: srcLocation, erroronmissingdir: false) {
|
||||
srcExcludes.each { pattern -> ant.exclude(name: pattern) }
|
||||
}
|
||||
}
|
||||
|
||||
[
|
||||
project.sourceSets.main.resources.srcDirs
|
||||
].flatten().each { srcLocation ->
|
||||
ant.fileset(dir: srcLocation, erroronmissingdir: false) {
|
||||
ant.include(name: "META-INF/**")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The license rules below were manually copied from lucene/common-build.xml, there is currently no mechanism to sync them
|
||||
|
||||
// BSD 4-clause stuff (is disallowed below)
|
||||
substringMatcher(licenseFamilyCategory: "BSD4 ", licenseFamilyName: "Original BSD License (with advertising clause)") {
|
||||
pattern(substring: "All advertising materials")
|
||||
}
|
||||
|
||||
// BSD-like stuff
|
||||
substringMatcher(licenseFamilyCategory: "BSD ", licenseFamilyName: "Modified BSD License") {
|
||||
// brics automaton
|
||||
pattern(substring: "Copyright (c) 2001-2009 Anders Moeller")
|
||||
// snowball
|
||||
pattern(substring: "Copyright (c) 2001, Dr Martin Porter")
|
||||
// UMASS kstem
|
||||
pattern(substring: "THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS")
|
||||
// Egothor
|
||||
pattern(substring: "Egothor Software License version 1.00")
|
||||
// JaSpell
|
||||
pattern(substring: "Copyright (c) 2005 Bruno Martins")
|
||||
// d3.js
|
||||
pattern(substring: "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS")
|
||||
// highlight.js
|
||||
pattern(substring: "THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS")
|
||||
}
|
||||
|
||||
// MIT-like
|
||||
substringMatcher(licenseFamilyCategory: "MIT ", licenseFamilyName:"Modified BSD License") {
|
||||
// ICU license
|
||||
pattern(substring: "Permission is hereby granted, free of charge, to any person obtaining a copy")
|
||||
}
|
||||
|
||||
// Apache
|
||||
substringMatcher(licenseFamilyCategory: "AL ", licenseFamilyName: "Apache") {
|
||||
pattern(substring: "Licensed to the Apache Software Foundation (ASF) under")
|
||||
// this is the old - school one under some files
|
||||
pattern(substring: 'Licensed under the Apache License, Version 2.0 (the "License")')
|
||||
}
|
||||
|
||||
substringMatcher(licenseFamilyCategory: "GEN ", licenseFamilyName: "Generated") {
|
||||
// svg files generated by gnuplot
|
||||
pattern(substring: "Produced by GNUPLOT")
|
||||
// snowball stemmers generated by snowball compiler
|
||||
pattern(substring: "This file was generated automatically by the Snowball to Java compiler")
|
||||
// parsers generated by antlr
|
||||
pattern(substring: "ANTLR GENERATED CODE")
|
||||
}
|
||||
|
||||
approvedLicense(familyName: "Apache")
|
||||
approvedLicense(familyName: "The MIT License")
|
||||
approvedLicense(familyName: "Modified BSD License")
|
||||
approvedLicense(familyName: "Generated")
|
||||
}
|
||||
}
|
||||
|
||||
def printUnknownFiles() {
|
||||
def ratXml = new XmlParser().parse(xmlReport)
|
||||
def errors = []
|
||||
ratXml.resource.each { resource ->
|
||||
if (resource.'license-approval'.@name[0] == "false") {
|
||||
errors << "Unknown license: ${resource.@name}"
|
||||
}
|
||||
}
|
||||
if (errors) {
|
||||
throw new GradleException("Found " + errors.size() + " file(s) with errors:\n" +
|
||||
errors.collect{ msg -> " - ${msg}" }.join("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
def rat() {
|
||||
def origEncoding = System.getProperty("file.encoding")
|
||||
try {
|
||||
generateXmlReport()
|
||||
printUnknownFiles()
|
||||
} finally {
|
||||
if (System.getProperty("file.encoding") != origEncoding) {
|
||||
throw new GradleException("Insane: rat changed file.encoding to ${System.getProperty('file.encoding')}?")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
|
||||
// Equivalent of ant's "validate-source-patterns".
|
||||
// This should be eventually rewritten in plain gradle. For now, delegate to
|
||||
// the ant/groovy script we already have.
|
||||
|
||||
configure(rootProject) {
|
||||
configurations {
|
||||
checkSourceDeps
|
||||
}
|
||||
|
||||
dependencies {
|
||||
checkSourceDeps "org.codehaus.groovy:groovy-all:2.4.17"
|
||||
checkSourceDeps "org.apache.rat:apache-rat:${scriptDepVersions['apache-rat']}"
|
||||
}
|
||||
|
||||
task validateSourcePatterns() {
|
||||
doFirst {
|
||||
ant.taskdef(
|
||||
name: "groovy",
|
||||
classname: "org.codehaus.groovy.ant.Groovy",
|
||||
classpath: configurations.checkSourceDeps.asPath)
|
||||
|
||||
ant.groovy(src: project(":lucene").file("tools/src/groovy/check-source-patterns.groovy"))
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
// This ensures 'versions.props' file is sorted lexicographically.
|
||||
|
||||
configure(rootProject) {
|
||||
task versionsPropsAreSorted() {
|
||||
doFirst {
|
||||
def versionsProps = file('versions.props')
|
||||
def lines = versionsProps.readLines("UTF-8")
|
||||
def sorted = lines.toSorted()
|
||||
|
||||
if (!Objects.equals(lines, sorted)) {
|
||||
def sortedFile = file("${buildDir}/versions.props")
|
||||
sortedFile.write(sorted.join("\n"), "UTF-8")
|
||||
throw new GradleException("${versionsProps} file is not sorted lexicographically. I wrote a sorted file to ${sortedFile} - please review and commit.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
Binary file not shown.
|
@ -0,0 +1,5 @@
|
|||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-6.0.1-all.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
|
@ -0,0 +1,188 @@
|
|||
#!/usr/bin/env sh
|
||||
|
||||
#
|
||||
# Copyright 2015 the original author or authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
##
|
||||
## Gradle start up script for UN*X
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >/dev/null
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >/dev/null
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD="maximum"
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
;;
|
||||
Darwin* )
|
||||
darwin=true
|
||||
;;
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
NONSTOP* )
|
||||
nonstop=true
|
||||
;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD="java"
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
MAX_FD="$MAX_FD_LIMIT"
|
||||
fi
|
||||
ulimit -n $MAX_FD
|
||||
if [ $? -ne 0 ] ; then
|
||||
warn "Could not set maximum file descriptor limit: $MAX_FD"
|
||||
fi
|
||||
else
|
||||
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# For Darwin, add options to specify how the application appears in the dock
|
||||
if $darwin; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
|
||||
fi
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
JAVACMD=`cygpath --unix "$JAVACMD"`
|
||||
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
SEP=""
|
||||
for dir in $ROOTDIRSRAW ; do
|
||||
ROOTDIRS="$ROOTDIRS$SEP$dir"
|
||||
SEP="|"
|
||||
done
|
||||
OURCYGPATTERN="(^($ROOTDIRS))"
|
||||
# Add a user-defined pattern to the cygpath arguments
|
||||
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
|
||||
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
|
||||
fi
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
i=0
|
||||
for arg in "$@" ; do
|
||||
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
|
||||
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
|
||||
|
||||
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
|
||||
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
|
||||
else
|
||||
eval `echo args$i`="\"$arg\""
|
||||
fi
|
||||
i=$((i+1))
|
||||
done
|
||||
case $i in
|
||||
(0) set -- ;;
|
||||
(1) set -- "$args0" ;;
|
||||
(2) set -- "$args0" "$args1" ;;
|
||||
(3) set -- "$args0" "$args1" "$args2" ;;
|
||||
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
|
||||
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
|
||||
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
|
||||
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
|
||||
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
|
||||
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Escape application args
|
||||
save () {
|
||||
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
|
||||
echo " "
|
||||
}
|
||||
APP_ARGS=$(save "$@")
|
||||
|
||||
# Collect all arguments for the java command, following the shell quoting and substitution rules
|
||||
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
|
||||
|
||||
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
|
||||
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
|
||||
cd "$(dirname "$0")"
|
||||
fi
|
||||
|
||||
exec "$JAVACMD" "$@"
|
|
@ -0,0 +1,100 @@
|
|||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windows variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
set CMD_LINE_ARGS=
|
||||
set _SKIP=2
|
||||
|
||||
:win9xME_args_slurp
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
|
@ -0,0 +1,50 @@
|
|||
Gradle for Ant users
|
||||
====================
|
||||
|
||||
This shows some common ant targets and their equivalent Gradle commands.
|
||||
Examples below assume cwd is at the top of the checkout (gradlew
|
||||
script available from ./). A bracketed [string] points at a better or
more conventional and commonly used task alternative.
|
||||
|
||||
Gradle tasks apply to all modules that contain a given task name. Use
|
||||
"-p" switch with a directory or a colon-notation to specify a particular
|
||||
task or module. For example these two are equivalent:
|
||||
|
||||
gradlew -p lucene/core check
|
||||
gradlew :lucene:core:check
|
||||
|
||||
List of common dev task equivalents
|
||||
-----------------------------------
|
||||
|
||||
ant clean => gradlew clean
|
||||
ant jar => gradlew jar [better: gradlew assemble]
|
||||
|
||||
ant compile => gradlew classes [better: gradlew assemble]
|
||||
gradlew testClasses [better: gradlew assemble]
|
||||
|
||||
ant validate => gradlew check
|
||||
ant test => gradlew test
|
||||
|
||||
ant jar-checksums => gradlew updateLicenses
|
||||
ant check-licenses => gradlew licenses [better: gradlew check -x test]
|
||||
ant clean-jars => (NO REPLACEMENT)
|
||||
|
||||
ant precommit => gradlew precommit
|
||||
|
||||
ant get-maven-poms => gradlew mavenLocal
|
||||
|
||||
Solr-specific targets
|
||||
---------------------
|
||||
|
||||
Assemble Solr distribution at solr/packaging/build/...
|
||||
|
||||
ant create-package => gradlew -p solr/packaging assemble
|
||||
|
||||
"Resolve" libraries by copying them to lib/ and other source
|
||||
locations. This task should *not* be used anymore. It is added
|
||||
for backward compatibility with ant (and for debugging)
|
||||
but it mixes sources with output locations and this workflow
|
||||
should be discouraged. Instead run assemble on packaging (above).
|
||||
|
||||
ant resolve => gradlew -p solr resolve
|
||||
|
|
@ -0,0 +1,138 @@
|
|||
Dependencies
|
||||
============
|
||||
|
||||
Each gradle project can have multiple (named) "configurations"
|
||||
and each configuration can have dependencies attached to it.
|
||||
|
||||
There are some standard conventions so, for example, the Java plugin
|
||||
adds standard configurations such as "api", "implementation",
|
||||
"testImplementation" and others. These configurations can also inherit
|
||||
from each other; more about this topic can be found here:
|
||||
|
||||
https://docs.gradle.org/current/userguide/dependency_management_for_java_projects.html#dependency_management_for_java_projects
|
||||
https://docs.gradle.org/current/userguide/java_library_plugin.html#sec:java_library_separation
|
||||
https://docs.gradle.org/current/userguide/java_plugin.html#sec:java_plugin_and_dependency_management
|
||||
|
||||
For the needs of Lucene and Solr we will typically focus on three
configurations and attach project dependencies to them (a short example
follows this list):
|
||||
|
||||
api - makes a dependency available for main classes, tests and any
|
||||
other modules importing the project (exportable dependency),
|
||||
|
||||
implementation - makes a dependency available for main classes, tests
|
||||
but will *not* export the dependency for other modules (so their
|
||||
compilation classpath won't contain it).
|
||||
|
||||
testImplementation - makes a dependency only available for test classes.
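
As a hypothetical illustration (the group/artifact names below are made
up; the actual versions would live in versions.props, as described
further down), a module's build.gradle could declare all three like this:

  dependencies {
    api "org.example:exported-lib"            // visible to modules importing this project
    implementation "org.example:internal-lib" // main and test classes only, not exported
    testImplementation "org.example:test-lib" // test classes only
  }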
|
||||
|
||||
|
||||
Adding a library dependency
|
||||
---------------------------
|
||||
|
||||
Let's say we wish to add a dependency on library "foo.bar:baz" in
|
||||
version 1.2 to :lucene:core. Let's assume this library is only
|
||||
used internally by the project. The :lucene:core project is configured
|
||||
by lucene/core/build.gradle and we would add (or modify) the dependency
|
||||
block as follows:
|
||||
|
||||
dependencies {
|
||||
implementation "foo.bar:baz"
|
||||
}
|
||||
|
||||
The "implementation" here is a named configuration; we don't need to declare
|
||||
it because it is declared for us by the java-library plugin.
|
||||
|
||||
In "normal" gradle the version of the dependency would be present
|
||||
directly inside the declaration but we use a plugin
|
||||
(palantir-consistent-versions) to manage all dependency versions
|
||||
from the top-level (so that conflicts can be resolved globally).
|
||||
|
||||
If this is the first time "foo.bar:baz" is added to the project, we'd have
|
||||
to add its version to "versions.props" file at the top level of the
|
||||
checkout:
|
||||
|
||||
foo.bar:baz=1.2
|
||||
|
||||
and then regenerate the "versions.lock" file using the following
|
||||
command:
|
||||
|
||||
gradlew --write-locks
|
||||
|
||||
IMPORTANT: The versions.lock file will contain the actual version
|
||||
of the dependency picked based on other project dependencies and
|
||||
their transitive dependencies. This selected version may be
|
||||
different from what each of these actually requires (the highest
|
||||
version number will be typically selected). To see which dependencies
|
||||
require which version of the library use:
|
||||
|
||||
gradlew why --hash=...
|
||||
|
||||
where the hash code comes from versions.lock file. For example, at
|
||||
the time of writing, jackson-databind has the following entry:
|
||||
|
||||
com.fasterxml.jackson.core:jackson-databind:2.10.0 (3 constraints: 931a7796)
|
||||
|
||||
and "gradlew why --hash=931a7796" prints:
|
||||
|
||||
com.fasterxml.jackson.core:jackson-databind:2.10.0
|
||||
projects -> 2.10.0
|
||||
net.thisptr:jackson-jq -> 2.7.0
|
||||
org.carrot2:carrot2-mini -> 2.9.9.3
|
||||
|
||||
Once the dependency is added it always makes sense to see the
|
||||
tree of all module dependencies and maybe exclude transitive
|
||||
dependencies of foo.bar:baz that we won't need.
|
||||
|
||||
|
||||
Inspecting current dependencies
|
||||
-------------------------------
|
||||
|
||||
The tree of dependencies of a project (in all configurations) can
|
||||
be dumped by the following command (example):
|
||||
|
||||
gradlew -p lucene\analysis\icu dependencies
|
||||
|
||||
But this can be a bit overwhelming; we will most likely be interested
|
||||
in just the "publicly visible" and "classpath-visible" configurations.
|
||||
|
||||
The publicly visible project dependencies (classes shared by other
|
||||
modules importing our module) can be displayed with:
|
||||
|
||||
gradlew -p lucene\analysis\icu dependencies --configuration api
|
||||
|
||||
And the "private" set of dependencies (real classpath) can be dumped
|
||||
with:
|
||||
|
||||
gradlew -p lucene\analysis\icu dependencies --configuration runtimeClasspath
|
||||
|
||||
|
||||
Excluding a transitive dependency
|
||||
---------------------------------
|
||||
|
||||
Let's say "foo.bar:baz" has a transitive dependency on project
|
||||
"foo.bar:irrelevant" and we know the transitive dependency is not
|
||||
crucial for the functioning of "foo.bar:baz". We can exclude it
|
||||
by adding an exclusion block to the original declaration:
|
||||
|
||||
dependencies {
|
||||
implementation("foo.bar:baz", {
|
||||
exclude group: "foo.bar", module: "irrelevant"
|
||||
})
|
||||
}
|
||||
|
||||
Note the brackets - they are important and prevent accidental
|
||||
mistakes of applying the exclusion to the wrong scope.
|
||||
|
||||
|
||||
Updating dependency checksum and licenses
|
||||
-----------------------------------------
|
||||
|
||||
The last step is to make sure the licenses, notice files and checksums
|
||||
are in place for any new dependencies. This command will print what's
|
||||
missing and where:
|
||||
|
||||
gradlew licenses
|
||||
|
||||
To update JAR checksums for licenses use:
|
||||
|
||||
gradlew updateLicenses
|
|
@ -0,0 +1,34 @@
|
|||
Forbidden API rules
|
||||
===================
|
||||
|
||||
Uwe's excellent forbidden API checker is applied as part of 'check'
|
||||
task. The rules for each project are sourced dynamically based on the
|
||||
actual set of dependencies.
|
||||
|
||||
If a given project has a dependency on an artifact called "foo.bar:baz"
|
||||
then all of these rule files will be applied (all paths relative
|
||||
to: gradle/validation/forbidden-apis/).
|
||||
|
||||
defaults.all.txt
|
||||
defaults.[project].txt
|
||||
foo.bar.baz.all.txt
|
||||
foo.bar.baz.[project].txt
|
||||
|
||||
Note that the "defaults" can't reference any JARs other than Java's
|
||||
runtime.
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
We'd like to prevent people from using Guava's
|
||||
com.google.common.base.Charsets class. The rule would be:
|
||||
|
||||
@defaultMessage Use java.nio.charset.StandardCharsets instead
|
||||
com.google.common.base.Charsets
|
||||
|
||||
and we would place this rule in this file:
|
||||
|
||||
gradle/validation/forbidden-apis/com.google.guava.guava.all.txt
|
||||
|
||||
From now on, if *any* module depends on this library, it will
|
||||
automatically pick up the rule and enforce it.
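
As a quick way to confirm the rule is being applied, one could run the
forbidden-apis tasks for a module that depends on that library (the
module path below is only an example):

  gradlew -p solr/core forbiddenApisMain forbiddenApisTest

Any use of the forbidden class in that module should then fail the check.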
|
|
@ -0,0 +1,14 @@
|
|||
A list of resources that may be helpful for those learning git:
|
||||
|
||||
- Dawid's task-oriented help:
|
||||
https://github.com/dweiss/lucene-git-guides
|
||||
|
||||
- git for computer scientists (a nice writeup on how git is essentially a graph
|
||||
of commits and labels attached to those commits):
|
||||
https://eagain.net/articles/git-for-computer-scientists/
|
||||
|
||||
- git pro (book)
|
||||
https://git-scm.com/book/en/v2
|
||||
|
||||
- git workflows, their pros and cons:
|
||||
https://help.github.com/articles/what-is-a-good-git-workflow/
|
|
@ -0,0 +1,46 @@
|
|||
Local developer settings
|
||||
========================
|
||||
|
||||
The first invocation of any task in Lucene/Solr gradle build will generate
|
||||
and save a project-local 'gradle.properties' file. This file contains
|
||||
the defaults you may (but don't have to) tweak for your particular hardware
|
||||
(or taste).
|
||||
|
||||
This is an overview of some of these settings.
|
||||
|
||||
Parallelism
|
||||
-----------
|
||||
|
||||
Gradle build can run tasks in parallel but by default it consumes all CPU cores which
|
||||
is too optimistic a default for Lucene/Solr tests. You can disable the parallelism
|
||||
entirely or assign it a 'low' priority with these properties:
|
||||
|
||||
org.gradle.parallel=[true, false]
|
||||
org.gradle.priority=[normal, low]
|
||||
|
||||
The default level of parallelism is computed based on the number of cores on
|
||||
your machine (on the first run of gradle build). By default these are fairly conservative
|
||||
settings (half the number of cores for workers, for example):
|
||||
|
||||
org.gradle.workers.max=[X]
|
||||
tests.jvms=[N <= X]
|
||||
|
||||
The number of test JVMs can be lower than the number of workers: this just means
|
||||
that two projects can run tests in parallel to saturate all the workers. The I/O and memory
|
||||
bandwidth limits will kick in quickly so even if you have a very beefy machine bumping
|
||||
it too high may not help.
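
For instance, on an 8-core machine the generated gradle.properties might
end up with values along these lines (illustrative only; the defaults
generated on your machine may differ):

  org.gradle.parallel=true
  org.gradle.priority=normal
  org.gradle.workers.max=4
  tests.jvms=4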
|
||||
|
||||
You can always override these settings locally using command line as well:
|
||||
gradlew -Ptests.jvms=N --max-workers=X
|
||||
|
||||
Gradle Daemon
|
||||
-------------
|
||||
|
||||
The gradle daemon is a background process that keeps an evaluated copy of the project
|
||||
structure, some caches, etc. It speeds up repeated builds quite a bit but if you don't
|
||||
like the idea of having a (sizeable) background process running in the background,
|
||||
disable it.
|
||||
|
||||
org.gradle.daemon=[true, false]
|
||||
org.gradle.jvmargs=...
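
For example, to keep the daemon enabled but cap its heap, one might set
(values are illustrative):

  org.gradle.daemon=true
  org.gradle.jvmargs=-Xmx2g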
|
||||
|
|

@@ -0,0 +1,128 @@
Testing
=======

Examples below assume the current working directory is the top-level directory
of the project's checkout (where the gradlew script lives).


Generic test / checkup commands
-------------------------------

Run all unit tests:

gradlew test

Run all verification tasks, including tests:

gradlew check

Run all verification tasks, excluding tests (-x is gradle's generic task
exclusion mechanism):

gradlew check -x test

Run verification for a selected module only:

gradlew :lucene:core:check   # By full gradle project path
gradlew -p lucene/core check # By folder designation + task


Randomization
-------------

To run tests with a given starting seed, pass the 'tests.seed'
property:

gradlew :lucene:misc:test -Ptests.seed=DEADBEEF

There are a lot of other test randomization properties available.
To list them, their defaults and their current values, run the
testOpts task against a project that has tests. For example:

gradlew -p lucene/core testOpts


Filtering
---------

Run tests of the lucene-core module:

gradlew -p lucene/core test

Run a single test case (from a single module), using gradle's built-in filtering
(https://docs.gradle.org/current/userguide/java_testing.html#test_filtering):

gradlew -p lucene/core test --tests TestDemo

Run all tests in a package:

gradlew -p lucene/core test --tests "org.apache.lucene.document.*"

Run all test classes/methods that match this pattern:

gradlew -p lucene/core test --tests "*testFeatureMissing*"
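
The --tests option can also be repeated in a single invocation (gradle runs
tests matching any of the patterns); reusing the names from the examples above:

gradlew -p lucene/core test --tests TestDemo --tests "*testFeatureMissing*"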


Test groups
-----------

Tests can be filtered by an annotation they're marked with.
Some test group annotations include: @AwaitsFix, @Nightly, @Slow

This uses filtering infrastructure on the *runner* (randomizedtesting),
not gradle's built-in mechanisms (but it can be combined with "--tests").
For example, run all lucene-core tests annotated as @Slow:

gradlew -p lucene/core test -Ptests.filter=@Slow
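
Because the group filter and gradle's --tests option operate at different
levels, the two can be combined on one command line (the class pattern here
is only illustrative):

gradlew -p lucene/core test --tests "Test*" -Ptests.filter=@Slow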

Test group filters can be combined into Boolean expressions:

gradlew -p lucene/core test -Ptests.filter="default and not(@awaitsfix or @slow)"


Reiteration ("beasting")
------------------------

Multiply each test case N times (this works by repeating the same test
within the same JVM; it also works in IDEs):

gradlew -p lucene/core test --tests TestDemo -Ptests.iters=5

Test tasks will (by default) be re-executed on each invocation because
we pick a random global tests.seed. If you run the same tests twice
with the same seed, the test task will be skipped (as it is up-to-date
with respect to the source code):

gradlew -p lucene/core test -Ptests.seed=deadbeef

To force re-execution of tests, even for the same master seed, add the
cleanTest task:

gradlew -p lucene/core cleanTest test -Ptests.seed=deadbeef
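
Putting these together, a deterministic "beasting" run of a single test class
(the class name and seed are just examples) could look like:

gradlew -p lucene/core cleanTest test --tests TestDemo -Ptests.iters=5 -Ptests.seed=deadbeef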


Verbose mode and debugging
--------------------------

The "tests.verbose" mode switch enables standard streams from tests
to be dumped directly to the console. Run your verbose tests explicitly,
specifying the project and test task or a fully qualified task path. Example:

gradlew -p lucene/core test -Ptests.verbose=true --tests "TestDemo"


Testing against different JVMs
------------------------------

By default tests are executed with the same Java version gradle itself uses.
To run tests against a different Java version, define a property called
"runtime.java.home" or define an environment variable "RUNTIME_JAVA_HOME"
pointing at the JDK installation folder.

If the property form is used, it can be a system property (-D...) or a project
property (-P...).

Example:

gradlew test -p lucene/test-framework --tests TestJvmInfo -Dtests.verbose=true -Druntime.java.home=/jvms/jdk14
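
The environment-variable form, assuming a Unix-like shell (the JDK path is just
an example), would be:

RUNTIME_JAVA_HOME=/jvms/jdk14 gradlew -p lucene/test-framework test --tests TestJvmInfo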

@@ -0,0 +1,33 @@
Typical workflow and tasks
==========================

This shows some typical workflow gradle commands.

Run tests on a module:
gradlew -p lucene/core test

Run tests of a single class (run "gradlew :helpTests" for more):
gradlew -p lucene/core test --tests "*Demo*"

Run all tests and validation checks on a module:
gradlew -p lucene/core check

Run all tests and validation checks on everything:
gradlew check

Run all validation checks but skip all tests:
gradlew check -x test

Assemble a single module's JAR (here for lucene-core):
gradlew -p lucene/core assemble
ls lucene/core/build/libs

Create all distributable packages, POMs, etc. and create a
local maven repository for inspection:
gradlew mavenLocal
ls -R build/maven-local/

Put together the Solr distribution:
gradlew -p solr/packaging assemble
ls solr/packaging/build/solr-*

@@ -0,0 +1,7 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,10 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  api 'com.ibm.icu:icu4j'

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,8 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,13 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  api 'org.carrot2:morfologik-stemming'

  implementation 'org.carrot2:morfologik-polish'
  implementation 'ua.net.nlp:morfologik-ukrainian-search'

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,8 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,9 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')
  api 'org.apache.opennlp:opennlp-tools'

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,11 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  implementation 'commons-codec:commons-codec'

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,8 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,8 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:analysis:common')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,7 @@

apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,22 @@

apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:analysis:common')
  implementation project(':lucene:facet')
  implementation project(':lucene:highlighter')
  implementation project(':lucene:queries')
  implementation project(':lucene:spatial-extras')
  implementation project(':lucene:queryparser')

  implementation "org.apache.commons:commons-compress"
  implementation "com.ibm.icu:icu4j"
  implementation "org.locationtech.spatial4j:spatial4j"
  implementation("net.sourceforge.nekohtml:nekohtml", {
    exclude module: "xml-apis"
  })

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,3 @@
subprojects {
  group "org.apache.lucene"
}

@@ -0,0 +1,12 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:queries')
  implementation project(':lucene:grouping')

  testImplementation project(':lucene:test-framework')
  testImplementation project(':lucene:analysis:common')
  testImplementation project(':lucene:codecs')
}

@@ -0,0 +1,6 @@
apply plugin: 'java-library'

dependencies {
  implementation project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}

@@ -266,7 +266,7 @@
<property name="pitest.targetClasses" value="org.apache.*" />

<!-- a reasonable default exclusion set, can be overridden for special cases -->
-<property name="rat.excludes" value="**/TODO,**/*.txt,**/*.iml"/>
+<property name="rat.excludes" value="**/TODO,**/*.txt,**/*.iml,**/*.gradle"/>

<!-- These patterns can be defined to add additional files for checks, relative to module's home dir -->
<property name="rat.additional-includes" value=""/>

@@ -0,0 +1,7 @@

apply plugin: 'java-library'

dependencies {
  testImplementation project(':lucene:codecs')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,12 @@
apply plugin: 'java-library'

dependencies {
  implementation project(':lucene:core')
  implementation project(':lucene:facet')
  implementation project(':lucene:queries')
  implementation project(':lucene:analysis:common')
  implementation project(':lucene:queryparser')
  implementation project(':lucene:expressions')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,21 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:codecs')

  implementation 'org.antlr:antlr4-runtime'

  // It is awkward that we force-omit the intermediate dependency here...
  // The dependency chain is:
  // asm-commons -> asm-tree -> asm
  // Should we really go through these hoops?
  implementation 'org.ow2.asm:asm'
  implementation('org.ow2.asm:asm-commons', {
    exclude group: "org.ow2.asm", module: "asm-tree"
    exclude group: "org.ow2.asm", module: "asm-analysis"
  })

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,12 @@

apply plugin: 'java-library'


dependencies {
  api project(':lucene:core')

  implementation 'com.carrotsearch:hppc'

  testImplementation project(':lucene:test-framework')
  testImplementation project(':lucene:queries')
}

@@ -0,0 +1,10 @@

apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:queries')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,12 @@

apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:queries')
  implementation project(':lucene:memory')

  testImplementation project(':lucene:test-framework')
  testImplementation project(':lucene:analysis:common')
}

@@ -0,0 +1,6 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,16 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:codecs')
  implementation project(':lucene:backward-codecs')
  implementation project(':lucene:analysis:common')
  implementation project(':lucene:queries')
  implementation project(':lucene:queryparser')
  implementation project(':lucene:misc')

  implementation 'org.apache.logging.log4j:log4j-core'

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,9 @@

apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  testImplementation project(':lucene:test-framework')
  testImplementation project(':lucene:queryparser')
}

@@ -0,0 +1,6 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,11 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:memory')
  implementation project(':lucene:analysis:common')

  testImplementation project(':lucene:queryparser')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,8 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  testImplementation project(':lucene:test-framework')
  testImplementation project(':lucene:expressions')
}

@@ -0,0 +1,9 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:queries')
  api project(':lucene:sandbox')

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,19 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')

  implementation project(':lucene:facet')

  implementation('org.apache.httpcomponents:httpclient', {
    exclude group: "commons-codec", module: "commons-codec"
  })

  implementation 'org.eclipse.jetty:jetty-server'
  implementation('org.eclipse.jetty:jetty-servlet', {
    exclude group: "org.eclipse.jetty", module: "jetty-security"
  })
  implementation 'org.eclipse.jetty:jetty-continuation'

  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,6 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}

@@ -0,0 +1,14 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  api project(':lucene:spatial3d')

  api 'org.locationtech.spatial4j:spatial4j'
  api 'io.sgr:s2-geometry-library-java'

  testImplementation project(':lucene:test-framework')

  testImplementation 'org.locationtech.jts:jts-core'
  testImplementation 'org.locationtech.spatial4j:spatial4j::tests'
}

@@ -0,0 +1,6 @@
apply plugin: 'java-library'

dependencies {
  api project(':lucene:core')
  testImplementation project(':lucene:test-framework')
}