LUCENE-9435: Clean up ant compatibility remnants in Gradle build

* Remove unused ant-only tasks.
* Correct message in TestVersion.java.
* Remove unused file.
* Remove forbidden API rules for ant.
* Remove 'resolve' emulation.
* Move ecj-lint to a task-definition-relative folder.
* Remove 'packaging' specification. It will have to wait until proper new packaging is implemented for the Solr distribution.
* Move render-javadoc task's files to a task-relative location.
* Move security manager policies and the default JDK logging file to gradle's task-relative locations.
* Remove obsolete ant tools. Move check-source-patterns under gradle's folder.
* Correct paths.
* Correct property name in task selector.
Dawid Weiss 2020-08-30 14:01:04 +02:00 committed by GitHub
parent a3b3ba10e3
commit def82ab556
40 changed files with 18 additions and 3826 deletions


@@ -163,12 +163,10 @@ apply from: file('gradle/help.gradle')
// of potential problems with the build conventions, dependencies, etc.
apply from: file('gradle/ant-compat/force-versions.gradle')
apply from: file('gradle/ant-compat/misc.gradle')
apply from: file('gradle/ant-compat/resolve.gradle')
apply from: file('gradle/ant-compat/post-jar.gradle')
apply from: file('gradle/ant-compat/test-classes-cross-deps.gradle')
apply from: file('gradle/ant-compat/artifact-naming.gradle')
apply from: file('gradle/ant-compat/solr-forbidden-apis.gradle')
apply from: file('gradle/ant-compat/forbidden-api-rules-in-sync.gradle')
apply from: file('gradle/documentation/documentation.gradle')
apply from: file('gradle/documentation/changes-to-html.gradle')


@@ -1,54 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Just make sure the forbidden API rules are in sync between gradle and ant versions until
// we get rid of ant build.
def linesOf(FileTree ftree) {
return ftree.collectMany { path ->
path.readLines("UTF-8")
.collect { line -> line.trim() }
.findAll { line -> !line.startsWith("#") }
.unique()
.collect { line -> [path: path, line: line] }
}.groupBy { e -> e.line }
}
configure(rootProject) {
task verifyForbiddenApiRulesInSync() {
doFirst {
// Read all rules line by line from ant, gradle, remove comments, uniq.
// Rule sets should be identical.
def gradleRules = linesOf(fileTree("gradle/validation/forbidden-apis", { include "**/*.txt" }))
def antRules = linesOf(project(":lucene").fileTree("tools/forbiddenApis", { include "**/*.txt" }))
def antOnlyLines = antRules.keySet() - gradleRules.keySet()
def gradleOnlyLines = gradleRules.keySet() - antRules.keySet()
if (!gradleOnlyLines.isEmpty() || !antOnlyLines.isEmpty()) {
project.logger.log(LogLevel.ERROR, "The following rules don't have counterparts:\n" +
(gradleRules.findAll { gradleOnlyLines.contains(it.key) } + antRules.findAll { antOnlyLines.contains(it.key)})
.collectMany { it.value }
.join("\n"))
throw new GradleException("Forbidden APIs rules out of sync.")
}
}
}
check.dependsOn verifyForbiddenApiRulesInSync
}


@@ -1,26 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This file is not included but is kept in ant-compat so that cleanup can be done later
// Remove special handling of dependency checksum validation/ collection for Solr where
// transitive Lucene dependencies are sucked in to licenses/ folder. We can just copy
// Lucene licenses as a whole (they're joint projects after all).
//
// the hack is in 'jar-checks.gradle' under:
// def isSolr = project.path.startsWith(":solr")
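
For orientation, a rough sketch of the shape of that special case follows. jar-checks.gradle itself is not part of this diff, so apart from the quoted isSolr line everything below is a hypothetical illustration.

// Hypothetical sketch only -- not the actual jar-checks.gradle source.
def isSolr = project.path.startsWith(":solr")
if (isSolr) {
    // Solr modules: transitive Lucene dependencies are also collected under licenses/.
} else {
    // Lucene modules: only the module's own dependency licenses and checksums are collected.
}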


@@ -1,223 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// For Lucene, a 'resolve' task that copies any (non-project) dependencies
// under lib/ folder.
configure(allprojects.findAll {project -> project.path.startsWith(":lucene") }) {
plugins.withType(JavaPlugin) {
configurations {
runtimeLibs {
extendsFrom runtimeElements
extendsFrom testRuntimeClasspath
}
}
task resolve(type: Sync) {
from({
return configurations.runtimeLibs.copyRecursive { dep ->
!(dep instanceof org.gradle.api.artifacts.ProjectDependency)
}
})
into 'lib'
}
}
}
// For Solr, a 'resolve' task is much more complex. There are three folders:
// lib/
// test-lib/
// lucene-libs/
//
// There doesn't seem to be one ideal set of rules on how these should be created, but
// I tried to imitate the current (master) logic present in ivy and ant files in this way:
//
// The "solr platform" set of dependencies is a union of all deps for (core, solrj, server).
//
// Then:
// lib - these are module's "own" dependencies, excluding Lucene's that are not present in the
// solr platform.
// lucene-libs - these are lucene modules declared as module's dependencies and not
// present in solr platform.
// test-lib/ - libs not present in solr platform and not included in solr:test-framework.
//
// None of these are really needed with gradle... they should be collected just in the distribution
// package, not at each project's level.
//
// Unfortunately this "resolution" process is also related to how the final Solr packaging is assembled.
// I don't know how to untie these two cleanly.
//
configure(allprojects.findAll {project -> project.path.startsWith(":solr:contrib") }) {
plugins.withType(JavaPlugin) {
ext {
packagingDir = file("${buildDir}/packaging")
deps = file("${packagingDir}/${project.name}")
}
configurations {
solrPlatformLibs
solrTestPlatformLibs
runtimeLibs {
extendsFrom runtimeElements
}
packaging
}
dependencies {
solrPlatformLibs project(":solr:core")
solrPlatformLibs project(":solr:solrj")
solrPlatformLibs project(":solr:server")
solrTestPlatformLibs project(":solr:test-framework")
}
// An aggregate that configures lib, lucene-libs and test-lib in a temporary location.
task assemblePackaging(type: Sync) {
from "README.txt"
from ({
def externalLibs = configurations.runtimeLibs.copyRecursive { dep ->
if (dep instanceof org.gradle.api.artifacts.ProjectDependency) {
return !dep.dependencyProject.path.startsWith(":solr")
} else {
return true
}
}
return externalLibs - configurations.solrPlatformLibs
}, {
exclude "lucene-*"
into "lib"
})
from ({
def projectLibs = configurations.runtimeLibs.copyRecursive { dep ->
(dep instanceof org.gradle.api.artifacts.ProjectDependency)
}
return projectLibs - configurations.solrPlatformLibs
}, {
include "lucene-*"
into "lucene-libs"
})
into deps
}
task syncLib(type: Sync) {
dependsOn assemblePackaging
from(file("${deps}/lib"), {
include "**"
})
into file("${projectDir}/lib")
}
task syncTestLib(type: Sync) {
// From test runtime classpath exclude:
// 1) project dependencies (and their dependencies)
// 2) runtime dependencies
// What remains is this module's "own" test dependency.
from({
def testRuntimeLibs = configurations.testRuntimeClasspath.copyRecursive { dep ->
!(dep instanceof org.gradle.api.artifacts.ProjectDependency)
}
return testRuntimeLibs - configurations.runtimeLibs - configurations.solrTestPlatformLibs
})
into file("${projectDir}/test-lib")
}
task resolve() {
dependsOn syncLib, syncTestLib
}
// Contrib packaging currently depends on internal resolve.
artifacts {
packaging packagingDir, {
builtBy assemblePackaging
}
}
}
}
configure(project(":solr:example")) {
evaluationDependsOn(":solr:example") // explicitly wait for other configs to be applied
task resolve(type: Copy) {
from(configurations.postJar, {
into "exampledocs/"
})
into projectDir
}
}
configure(project(":solr:server")) {
evaluationDependsOn(":solr:server")
task resolve(type: Copy) {
dependsOn assemblePackaging
from({ packagingDir }, {
include "**/*.jar"
include "solr-webapp/webapp/**"
includeEmptyDirs false
})
into projectDir
}
}
configure(project(":solr:core")) {
evaluationDependsOn(":solr:core")
configurations {
runtimeLibs {
extendsFrom runtimeElements
}
}
task resolve(type: Sync) {
from({
def ownDeps = configurations.runtimeLibs.copyRecursive { dep ->
if (dep instanceof org.gradle.api.artifacts.ProjectDependency) {
return !dep.dependencyProject.path.startsWith(":solr")
} else {
return true
}
}
return ownDeps
}, {
exclude "lucene-*"
})
into "lib"
}
}
configure(project(":solr:solrj")) {
evaluationDependsOn(":solr:solrj")
task resolve(type: Sync) {
from({ configurations.runtimeClasspath }, {
})
into "lib"
}
}


@@ -19,7 +19,7 @@
// Why does solr exclude these from forbidden API checks?
configure(project(":solr:core")) {
configure([forbiddenApisMain, forbiddenApisTest]) {
tasks.matching { it.name == "forbiddenApisMain" || it.name == "forbiddenApisTest" }.all {
exclude "org/apache/solr/internal/**"
exclude "org/apache/hadoop/**"
}
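
The configure([forbiddenApisMain, forbiddenApisTest]) line above is replaced with Gradle's lazy tasks.matching { }.all { } idiom, which applies the configuration to any task whose name matches, whether it already exists or is registered later. Below is a minimal, self-contained sketch of that idiom; the plugin, task names and option shown (java, compileJava, options.encoding) are illustrative assumptions and are not part of this commit.

// Illustrative sketch only (not from this commit): lazy task matching in a build.gradle.
plugins {
    id 'java'
}

// Eager form -- requires the tasks to exist at the point the script is evaluated:
//   configure([compileJava, compileTestJava]) { options.encoding = 'UTF-8' }

// Lazy form -- the rule fires for every matching task, including tasks
// registered after this block runs.
tasks.matching { it.name == 'compileJava' || it.name == 'compileTestJava' }.all {
    // the delegate here is the matched JavaCompile task
    options.encoding = 'UTF-8'
}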


@@ -65,8 +65,9 @@ allprojects {
// Set up titles and link up some offline docs for all documentation
// (they may be unused but this doesn't do any harm).
def javaJavadocPackages = project.project(':lucene').file('tools/javadoc/java11/')
def junitJavadocPackages = project.project(':lucene').file('tools/javadoc/junit/')
def taskResources = "${rootProject.projectDir}/gradle/documentation/render-javadoc"
def javaJavadocPackages = rootProject.file("${taskResources}/java11/")
def junitJavadocPackages = rootProject.file("${taskResources}/junit/")
allprojects {
project.tasks.withType(RenderJavadocTask) {
title = "${project.path.startsWith(':lucene') ? 'Lucene' : 'Solr'} ${project.version} ${project.name} API"
@@ -310,15 +311,19 @@ class RenderJavadocTask extends DefaultTask {
throw new GradleException("Javadoc generation failed for ${project.path},\n Options file at: ${optionsFile}\n Command output at: ${outputFile}", cause)
}
def taskResources = "${project.rootDir}/gradle/documentation/render-javadoc"
// append some special table css, prettify css
ant.concat(destfile: "${outputDir}/stylesheet.css", append: "true", fixlastline: "true", encoding: "UTF-8") {
filelist(dir: project.project(":lucene").file("tools/javadoc"), files: "table_padding.css")
filelist(dir: project.project(":lucene").file("tools/prettify"), files: "prettify.css")
filelist(dir: taskResources, files: "table_padding.css")
filelist(dir: project.file("${taskResources}/prettify"), files: "prettify.css")
}
// append prettify to scripts
ant.concat(destfile: "${outputDir}/script.js", append: "true", fixlastline: "true", encoding: "UTF-8") {
filelist(dir: project.project(':lucene').file("tools/prettify"), files: "prettify.js inject-javadocs.js")
filelist(dir: project.file("${taskResources}/prettify"), files: "prettify.js inject-javadocs.js")
}
ant.fixcrlf(srcdir: outputDir, includes: "stylesheet.css script.js", eol: "lf", fixlast: "true", encoding: "UTF-8")
}
}


@@ -71,7 +71,7 @@ allprojects {
jvmArgs Commandline.translateCommandline(propertyOrDefault("tests.jvmargs", System.getenv('TEST_JVM_ARGS') ?: "-XX:TieredStopAtLevel=1"))
systemProperty 'java.util.logging.config.file', file("${commonDir}/tools/junit4/logging.properties")
systemProperty 'java.util.logging.config.file', rootProject.file("gradle/testing/defaults-tests/logging.properties")
systemProperty 'java.awt.headless', 'true'
systemProperty 'jdk.map.althashing.threshold', '0'


@@ -178,10 +178,10 @@ allprojects {
if (Boolean.parseBoolean(testOptionsResolved["tests.useSecurityManager"])) {
if (project.path == ":lucene:replicator") {
systemProperty 'java.security.manager', "org.apache.lucene.util.TestSecurityManager"
systemProperty 'java.security.policy', rootProject.file("lucene/tools/junit4/replicator-tests.policy")
systemProperty 'java.security.policy', rootProject.file("gradle/testing/policies/replicator-tests.policy")
} else if (project.path.startsWith(":lucene")) {
systemProperty 'java.security.manager', "org.apache.lucene.util.TestSecurityManager"
systemProperty 'java.security.policy', rootProject.file("lucene/tools/junit4/tests.policy")
systemProperty 'java.security.policy', rootProject.file("gradle/testing/policies/tests.policy")
} else {
systemProperty 'common-solr.dir', commonSolrDir
systemProperty 'java.security.manager', "org.apache.lucene.util.TestSecurityManager"


@@ -69,7 +69,7 @@ allprojects {
args += [ "-proc:none" ]
args += [ "-nowarn" ]
args += [ "-enableJavadoc" ]
args += [ "-properties", project(":lucene").file("tools/javadoc/ecj.javadocs.prefs").absolutePath ]
args += [ "-properties", rootProject.file("gradle/validation/ecj-lint/ecj.javadocs.prefs").absolutePath ]
doFirst {
tmpDst.mkdirs()


@@ -1,4 +1,4 @@
/*
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.


@@ -15,8 +15,6 @@
* limitations under the License.
*/
// Equivalent of ant's "validate-source-patterns".
// This should be eventually rewritten in plain gradle. For now, delegate to
// the ant/groovy script we already have.
@@ -37,7 +35,7 @@ configure(rootProject) {
classname: "org.codehaus.groovy.ant.Groovy",
classpath: configurations.checkSourceDeps.asPath)
ant.groovy(src: project(":lucene").file("tools/src/groovy/check-source-patterns.groovy"))
ant.groovy(src: rootProject.file("gradle/validation/validate-source-patterns/check-source-patterns.groovy"))
}
}
}


@@ -193,7 +193,7 @@ public class TestVersion extends LuceneTestCase {
String commonBuildVersion = System.getProperty("tests.LUCENE_VERSION");
assumeTrue("Null 'tests.LUCENE_VERSION' test property. You should run the tests with the official Lucene build file",
commonBuildVersion != null);
assertEquals("Version.LATEST does not match the one given in common-build.xml",
assertEquals("Version.LATEST does not match the one given in tests.LUCENE_VERSION property",
Version.LATEST.toString(), commonBuildVersion);
}


@@ -1,64 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@defaultMessage Spawns threads with vague names; use a custom thread factory (Lucene's NamedThreadFactory, Solr's SolrNamedThreadFactory) and name threads so that you can tell (by its name) which executor it is associated with
java.util.concurrent.Executors#newFixedThreadPool(int)
java.util.concurrent.Executors#newSingleThreadExecutor()
java.util.concurrent.Executors#newCachedThreadPool()
java.util.concurrent.Executors#newSingleThreadScheduledExecutor()
java.util.concurrent.Executors#newScheduledThreadPool(int)
java.util.concurrent.Executors#defaultThreadFactory()
java.util.concurrent.Executors#privilegedThreadFactory()
@defaultMessage Properties files should be read/written with Reader/Writer, using UTF-8 charset. This allows reading older files with unicode escapes, too.
java.util.Properties#load(java.io.InputStream)
java.util.Properties#save(java.io.OutputStream,java.lang.String)
java.util.Properties#store(java.io.OutputStream,java.lang.String)
@defaultMessage The context classloader should never be used for resource lookups, unless there is a 3rd party library that needs it. Always pass a classloader down as method parameters.
java.lang.Thread#getContextClassLoader()
java.lang.Thread#setContextClassLoader(java.lang.ClassLoader)
java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars
java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
java.io.File#delete() @ use Files.delete for real exception, IOUtils.deleteFilesIgnoringExceptions if you dont care
java.util.Collections#shuffle(java.util.List) @ Use shuffle(List, Random) instead so that it can be reproduced
java.util.Locale#forLanguageTag(java.lang.String) @ use new Locale.Builder().setLanguageTag(...).build() which has error handling
java.util.Locale#toString() @ use Locale#toLanguageTag() for a standardized BCP47 locale name
@defaultMessage Constructors for wrapper classes of Java primitives should be avoided in favor of the public static methods available or autoboxing
java.lang.Integer#<init>(int)
java.lang.Integer#<init>(java.lang.String)
java.lang.Byte#<init>(byte)
java.lang.Byte#<init>(java.lang.String)
java.lang.Short#<init>(short)
java.lang.Short#<init>(java.lang.String)
java.lang.Long#<init>(long)
java.lang.Long#<init>(java.lang.String)
java.lang.Boolean#<init>(boolean)
java.lang.Boolean#<init>(java.lang.String)
java.lang.Character#<init>(char)
java.lang.Float#<init>(float)
java.lang.Float#<init>(double)
java.lang.Float#<init>(java.lang.String)
java.lang.Double#<init>(double)
java.lang.Double#<init>(java.lang.String)
@defaultMessage Java deserialization is unsafe when the data is untrusted. The java developer is powerless: no checks or casts help, exploitation can happen in places such as clinit or finalize!
java.io.ObjectInputStream
java.io.ObjectOutputStream


@@ -1,49 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@defaultMessage Use NIO.2 instead
java.io.File
java.io.FileInputStream
java.io.FileOutputStream
java.io.PrintStream#<init>(java.lang.String,java.lang.String)
java.io.PrintWriter#<init>(java.lang.String,java.lang.String)
java.util.Formatter#<init>(java.lang.String,java.lang.String,java.util.Locale)
java.io.RandomAccessFile
java.nio.file.Path#toFile()
java.util.jar.JarFile
java.util.zip.ZipFile
@defaultMessage Prefer using ArrayUtil as Arrays#copyOfRange fills zeros for bad bounds
java.util.Arrays#copyOfRange(byte[],int,int)
java.util.Arrays#copyOfRange(char[],int,int)
java.util.Arrays#copyOfRange(short[],int,int)
java.util.Arrays#copyOfRange(int[],int,int)
java.util.Arrays#copyOfRange(long[],int,int)
java.util.Arrays#copyOfRange(float[],int,int)
java.util.Arrays#copyOfRange(double[],int,int)
java.util.Arrays#copyOfRange(boolean[],int,int)
java.util.Arrays#copyOfRange(java.lang.Object[],int,int)
java.util.Arrays#copyOfRange(java.lang.Object[],int,int,java.lang.Class)
@defaultMessage Prefer using ArrayUtil as Arrays#copyOf fills zeros for bad bounds
java.util.Arrays#copyOf(byte[],int)
java.util.Arrays#copyOf(char[],int)
java.util.Arrays#copyOf(short[],int)
java.util.Arrays#copyOf(int[],int)
java.util.Arrays#copyOf(long[],int)
java.util.Arrays#copyOf(float[],int)
java.util.Arrays#copyOf(double[],int)
java.util.Arrays#copyOf(boolean[],int)
java.util.Arrays#copyOf(java.lang.Object[],int)
java.util.Arrays#copyOf(java.lang.Object[],int,java.lang.Class)


@@ -1,43 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@defaultMessage Servlet API method is parsing request parameters without using the correct encoding if no extra configuration is given in the servlet container
javax.servlet.ServletRequest#getParameter(java.lang.String)
javax.servlet.ServletRequest#getParameterMap()
javax.servlet.ServletRequest#getParameterNames()
javax.servlet.ServletRequest#getParameterValues(java.lang.String)
javax.servlet.http.HttpServletRequest#getSession() @ Servlet API getter has side effect of creating sessions
@defaultMessage Servlet API method is broken and slow in some environments (e.g., Jetty's UTF-8 readers)
javax.servlet.ServletRequest#getReader()
javax.servlet.ServletResponse#getWriter()
javax.servlet.ServletInputStream#readLine(byte[],int,int)
javax.servlet.ServletOutputStream#print(boolean)
javax.servlet.ServletOutputStream#print(char)
javax.servlet.ServletOutputStream#print(double)
javax.servlet.ServletOutputStream#print(float)
javax.servlet.ServletOutputStream#print(int)
javax.servlet.ServletOutputStream#print(long)
javax.servlet.ServletOutputStream#print(java.lang.String)
javax.servlet.ServletOutputStream#println(boolean)
javax.servlet.ServletOutputStream#println(char)
javax.servlet.ServletOutputStream#println(double)
javax.servlet.ServletOutputStream#println(float)
javax.servlet.ServletOutputStream#println(int)
javax.servlet.ServletOutputStream#println(long)
javax.servlet.ServletOutputStream#println(java.lang.String)


@@ -1,61 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@defaultMessage Spawns threads without MDC logging context; use ExecutorUtil.newMDCAwareFixedThreadPool instead
java.util.concurrent.Executors#newFixedThreadPool(int,java.util.concurrent.ThreadFactory)
@defaultMessage Spawns threads without MDC logging context; use ExecutorUtil.newMDCAwareSingleThreadExecutor instead
java.util.concurrent.Executors#newSingleThreadExecutor(java.util.concurrent.ThreadFactory)
@defaultMessage Spawns threads without MDC logging context; use ExecutorUtil.newMDCAwareCachedThreadPool instead
java.util.concurrent.Executors#newCachedThreadPool(java.util.concurrent.ThreadFactory)
@defaultMessage Use ExecutorUtil.MDCAwareThreadPoolExecutor instead of ThreadPoolExecutor
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue,java.util.concurrent.ThreadFactory,java.util.concurrent.RejectedExecutionHandler)
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue)
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue,java.util.concurrent.ThreadFactory)
java.util.concurrent.ThreadPoolExecutor#<init>(int,int,long,java.util.concurrent.TimeUnit,java.util.concurrent.BlockingQueue,java.util.concurrent.RejectedExecutionHandler)
@defaultMessage Use slf4j classes instead
org.apache.log4j.**
org.apache.logging.log4j.**
java.util.logging.**
@defaultMessage Use RTimer/TimeOut/System.nanoTime for time comparisons, and `new Date()` output/debugging/stats of timestamps. If for some miscellaneous reason, you absolutely need to use this, use a SuppressForbidden.
java.lang.System#currentTimeMillis()
@defaultMessage Use corresponding Java 8 functional/streaming interfaces
com.google.common.base.Function
com.google.common.base.Joiner
com.google.common.base.Predicate
com.google.common.base.Supplier
@defaultMessage Use java.nio.charset.StandardCharsets instead
com.google.common.base.Charsets
org.apache.commons.codec.Charsets
@defaultMessage Use methods in java.util.Objects instead
com.google.common.base.Objects#equal(java.lang.Object,java.lang.Object)
com.google.common.base.Objects#hashCode(java.lang.Object[])
com.google.common.base.Preconditions#checkNotNull(java.lang.Object)
com.google.common.base.Preconditions#checkNotNull(java.lang.Object,java.lang.Object)
@defaultMessage Use methods in java.util.Comparator instead
com.google.common.collect.Ordering
@defaultMessage Use org.apache.solr.common.annotation.JsonProperty instead
com.fasterxml.jackson.annotation.JsonProperty


@@ -1,29 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
junit.framework.TestCase @ All classes should derive from LuceneTestCase
java.util.Random#<init>() @ Use RandomizedRunner's random() instead
java.lang.Math#random() @ Use RandomizedRunner's random().nextDouble() instead
# TODO: fix tests that do this!
#java.lang.System#currentTimeMillis() @ Don't depend on wall clock times
#java.lang.System#nanoTime() @ Don't depend on wall clock times
com.carrotsearch.randomizedtesting.annotations.Seed @ Don't commit hardcoded seeds
@defaultMessage Use LuceneTestCase.collate instead, which can avoid JDK-8071862
java.text.Collator#compare(java.lang.Object,java.lang.Object)
java.text.Collator#compare(java.lang.String,java.lang.String)


@@ -1,61 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Task script that is called by Ant's build.xml file:
* Checks GIT working copy for unversioned or modified files.
*/
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.Status;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.errors.*;
def setProjectPropertyFromSet = { prop, set ->
if (set) {
properties[prop] = '* ' + set.join(properties['line.separator'] + '* ');
}
};
try {
task.log('Initializing working copy...', Project.MSG_INFO);
final Repository repository = new FileRepositoryBuilder()
.setWorkTree(project.getBaseDir())
.setMustExist(true)
.build();
task.log('Checking working copy status...', Project.MSG_INFO);
final Status status = new Git(repository).status().call();
if (!status.isClean()) {
final SortedSet unversioned = new TreeSet(), modified = new TreeSet();
status.properties.each{ prop, val ->
if (val instanceof Set) {
if (prop in ['untracked', 'untrackedFolders', 'missing']) {
unversioned.addAll(val);
} else if (prop != 'ignoredNotInIndex') {
modified.addAll(val);
}
}
};
setProjectPropertyFromSet('wc.unversioned.files', unversioned);
setProjectPropertyFromSet('wc.modified.files', modified);
}
} catch (RepositoryNotFoundException | NoWorkTreeException | NotSupportedException e) {
task.log('WARNING: Development directory is not a valid GIT checkout! Disabling checks...', Project.MSG_WARN);
}


@@ -1,61 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Task script that is called by Ant's common-build.xml file:
* Installs markdown filter into Ant.
*/
import org.apache.tools.ant.AntTypeDefinition;
import org.apache.tools.ant.ComponentHelper;
import org.apache.tools.ant.filters.TokenFilter.ChainableReaderFilter;
import com.vladsch.flexmark.util.ast.Document;
import com.vladsch.flexmark.ast.Heading;
import com.vladsch.flexmark.html.HtmlRenderer;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.parser.ParserEmulationProfile;
import com.vladsch.flexmark.util.html.Escaping;
import com.vladsch.flexmark.util.options.MutableDataSet;
import com.vladsch.flexmark.ext.abbreviation.AbbreviationExtension;
import com.vladsch.flexmark.ext.autolink.AutolinkExtension;
public final class MarkdownFilter extends ChainableReaderFilter {
@Override
public String filter(String markdownSource) {
MutableDataSet options = new MutableDataSet();
options.setFrom(ParserEmulationProfile.MARKDOWN);
options.set(Parser.EXTENSIONS, [ AbbreviationExtension.create(), AutolinkExtension.create() ]);
options.set(HtmlRenderer.RENDER_HEADER_ID, true);
options.set(HtmlRenderer.MAX_TRAILING_BLANK_LINES, 0);
Document parsed = Parser.builder(options).build().parse(markdownSource);
StringBuilder html = new StringBuilder('<html>\n<head>\n');
CharSequence title = parsed.getFirstChildAny(Heading.class)?.getText();
if (title != null) {
html.append('<title>').append(Escaping.escapeHtml(title, false)).append('</title>\n');
}
html.append('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">\n')
.append('</head>\n<body>\n');
HtmlRenderer.builder(options).build().render(parsed, html);
html.append('</body>\n</html>\n');
return html;
}
}
AntTypeDefinition t = new AntTypeDefinition();
t.setName('markdownfilter');
t.setClass(MarkdownFilter.class);
ComponentHelper.getComponentHelper(project).addDataTypeDefinition(t);


@@ -1,121 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Task script that is called by Ant's common-build.xml file:
* Runs test beaster.
*/
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.BuildLogger;
import org.apache.tools.ant.Project;
static boolean logFailOutput(Object task, String outdir) {
def logFile = new File(outdir, "tests-failures.txt");
if (logFile.exists()) {
logFile.eachLine("UTF-8", { line ->
task.log(line, Project.MSG_ERR);
});
}
}
int iters = (properties['beast.iters'] ?: '1') as int;
if (iters <= 1) {
throw new BuildException("Please give -Dbeast.iters with an int value > 1.");
}
def antcall = project.createTask('antcallback');
def junitOutDir = properties["junit.output.dir"];
def failed = false;
(1..iters).each { i ->
def outdir = junitOutDir + "/" + i;
task.log('Beast round ' + i + " results: " + outdir, Project.MSG_INFO);
try {
// disable verbose build logging:
project.buildListeners.each { listener ->
if (listener instanceof BuildLogger) {
listener.messageOutputLevel = Project.MSG_WARN;
}
};
new File(outdir).mkdirs();
properties["junit.output.dir"] = outdir;
antcall.setReturn("tests.failed");
antcall.setTarget("-test");
antcall.setInheritAll(true);
antcall.setInheritRefs(true);
antcall.with {
createParam().with {
name = "tests.isbeasting";
value = "true";
};
createParam().with {
name = "tests.timeoutSuite";
value = "900000";
};
createParam().with {
name = "junit.output.dir";
value = outdir;
};
};
properties["junit.output.dir"] = outdir;
antcall.execute();
def antcallResult = project.properties.'tests.failed' as boolean;
if (antcallResult) {
failed = true;
logFailOutput(task, outdir)
}
} catch (BuildException be) {
task.log(be.getMessage(), Project.MSG_ERR);
logFailOutput(task, outdir)
throw be;
} finally {
// restore build logging (unfortunately there is no way to get the original logging level (write-only property):
project.buildListeners.each { listener ->
if (listener instanceof BuildLogger) {
listener.messageOutputLevel = Project.MSG_INFO;
}
};
}
};
// restore junit output dir
properties["junit.output.dir"] = junitOutDir;
if (failed) {
task.log('Beasting finished with failure.', Project.MSG_INFO);
throw new BuildException("Beasting Failed!");
} else {
task.log('Beasting finished Successfully.', Project.MSG_INFO);
}


@@ -1,50 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Task script that is called by Ant's build.xml file:
* Runs maven build from within Ant after creating POMs.
*/
import groovy.xml.NamespaceBuilder;
import org.apache.tools.ant.Project;
def userHome = properties['user.home'], commonDir = properties['common.dir'];
def propPrefix = '-mvn.inject.'; int propPrefixLen = propPrefix.length();
def subProject = project.createSubProject();
project.copyUserProperties(subProject);
subProject.initProperties();
new AntBuilder(subProject).sequential{
property(file: userHome+'/lucene.build.properties', prefix: propPrefix);
property(file: userHome+'/build.properties', prefix: propPrefix);
property(file: commonDir+'/build.properties', prefix: propPrefix);
};
def cmdlineProps = subProject.properties
.findAll{ k, v -> k.startsWith(propPrefix) }
.collectEntries{ k, v -> [k.substring(propPrefixLen), v] };
cmdlineProps << project.userProperties.findAll{ k, v -> !k.startsWith('ant.') };
def artifact = NamespaceBuilder.newInstance(ant, 'antlib:org.apache.maven.artifact.ant');
task.log('Running Maven with props: ' + cmdlineProps.toString(), Project.MSG_INFO);
artifact.mvn(pom: properties['maven-build-dir']+'/pom.xml', mavenVersion: properties['maven-version'], failonerror: true, fork: true) {
sysproperty(key: 'maven.multiModuleProjectDirectory', file: properties['maven-build-dir'])
cmdlineProps.each{ k, v -> arg(value: '-D' + k + '=' + v) };
arg(value: '-fae');
arg(value: 'install');
};


@@ -1,920 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.dependencies;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.types.resources.Resources;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
/**
* An Ant task to generate a properties file containing maven dependency
* declarations, used to filter the maven POMs when copying them to
* maven-build/ via 'ant get-maven-poms', or to lucene/build/poms/
* via the '-filter-maven-poms' target, which is called from the
* 'generate-maven-artifacts' target.
*/
public class GetMavenDependenciesTask extends Task {
private static final Pattern PROPERTY_PREFIX_FROM_IVY_XML_FILE_PATTERN = Pattern.compile
("[/\\\\](lucene|solr)[/\\\\](?:(?:contrib|(analysis)|(example)|(server))[/\\\\])?([^/\\\\]+)[/\\\\]ivy\\.xml");
private static final Pattern COORDINATE_KEY_PATTERN = Pattern.compile("/([^/]+)/([^/]+)");
private static final Pattern MODULE_DEPENDENCIES_COORDINATE_KEY_PATTERN
= Pattern.compile("(.*?)(\\.test)?\\.dependencies");
// lucene/build/core/classes/java
private static final Pattern COMPILATION_OUTPUT_DIRECTORY_PATTERN
= Pattern.compile("(lucene|solr)/build/(?:contrib/)?(.*)/classes/(?:java|test)");
private static final String UNWANTED_INTERNAL_DEPENDENCIES
= "/(?:test-)?lib/|test-framework/classes/java|/test-files|/resources";
private static final Pattern SHARED_EXTERNAL_DEPENDENCIES_PATTERN
= Pattern.compile("((?:solr|lucene)/(?!test-framework).*)/((?:test-)?)lib/");
private static final String DEPENDENCY_MANAGEMENT_PROPERTY = "lucene.solr.dependency.management";
private static final String IVY_USER_DIR_PROPERTY = "ivy.default.ivy.user.dir";
private static final Properties allProperties = new Properties();
private static final Set<String> modulesWithSeparateCompileAndTestPOMs = new HashSet<>();
private static final Set<String> globalOptionalExternalDependencies = new HashSet<>();
private static final Map<String,Set<String>> perModuleOptionalExternalDependencies = new HashMap<>();
private static final Set<String> modulesWithTransitiveDependencies = new HashSet<>();
static {
// Add modules here that have split compile and test POMs
// - they need compile-scope deps to also be test-scope deps.
modulesWithSeparateCompileAndTestPOMs.addAll
(Arrays.asList("lucene-core", "lucene-codecs", "solr-core", "solr-solrj"));
// Add external dependencies here that should be optional for all modules
// (i.e., not invoke Maven's transitive dependency mechanism).
// Format is "groupId:artifactId"
globalOptionalExternalDependencies.addAll(Arrays.asList
("org.slf4j:jul-to-slf4j", "org.slf4j:slf4j-log4j12"));
// Add modules here that should NOT have their dependencies
// excluded in the grandparent POM's dependencyManagement section,
// thus enabling their dependencies to be transitive.
modulesWithTransitiveDependencies.addAll(Arrays.asList("lucene-test-framework"));
}
private final XPath xpath = XPathFactory.newInstance().newXPath();
private final SortedMap<String,SortedSet<String>> internalCompileScopeDependencies
= new TreeMap<>();
private final Set<String> nonJarDependencies = new HashSet<>();
private final Map<String,Set<String>> dependencyClassifiers = new HashMap<>();
private final Map<String,Set<String>> interModuleExternalCompileScopeDependencies = new HashMap<>();
private final Map<String,Set<String>> interModuleExternalTestScopeDependencies = new HashMap<>();
private final Map<String,SortedSet<ExternalDependency>> allExternalDependencies
= new HashMap<>();
private final DocumentBuilder documentBuilder;
private File ivyCacheDir;
private Pattern internalJarPattern;
private Map<String,String> ivyModuleInfo;
/**
* All ivy.xml files to get external dependencies from.
*/
private Resources ivyXmlResources = new Resources();
/**
* Centralized Ivy versions properties file
*/
private File centralizedVersionsFile;
/**
* Module dependencies properties file, generated by task -append-module-dependencies-properties.
*/
private File moduleDependenciesPropertiesFile;
/**
* Where all properties are written, to be used to filter POM templates when copying them.
*/
private File mavenDependenciesFiltersFile;
/**
* A logging level associated with verbose logging.
*/
private int verboseLevel = Project.MSG_VERBOSE;
/**
* Adds a set of ivy.xml resources to check.
*/
public void add(ResourceCollection rc) {
ivyXmlResources.add(rc);
}
public void setVerbose(boolean verbose) {
verboseLevel = (verbose ? Project.MSG_VERBOSE : Project.MSG_INFO);
}
public void setCentralizedVersionsFile(File file) {
centralizedVersionsFile = file;
}
public void setModuleDependenciesPropertiesFile(File file) {
moduleDependenciesPropertiesFile = file;
}
public void setMavenDependenciesFiltersFile(File file) {
mavenDependenciesFiltersFile = file;
}
public GetMavenDependenciesTask() {
try {
documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
} catch (ParserConfigurationException e) {
throw new BuildException(e);
}
}
/**
* Collect dependency information from Ant build.xml and ivy.xml files
* and from ivy-versions.properties, then write out an Ant filters file
* to be used when copying POMs.
*/
@Override
public void execute() throws BuildException {
// Local: lucene/build/analysis/common/lucene-analyzers-common-5.0-SNAPSHOT.jar
// Jenkins: lucene/build/analysis/common/lucene-analyzers-common-5.0-2013-10-31_18-52-24.jar
// Also support any custom version, which won't necessarily conform to any predefined pattern.
internalJarPattern = Pattern.compile(".*(lucene|solr)([^/]*?)-"
+ Pattern.quote(getProject().getProperty("version")) + "\\.jar");
ivyModuleInfo = getIvyModuleInfo(ivyXmlResources, documentBuilder, xpath);
setInternalDependencyProperties(); // side-effect: all modules' internal deps are recorded
setExternalDependencyProperties(); // side-effect: all modules' external deps are recorded
setGrandparentDependencyManagementProperty(); // uses deps recorded in above two methods
writeFiltersFile();
}
/**
* Write out an Ant filters file to be used when copying POMs.
*/
private void writeFiltersFile() {
Writer writer = null;
try {
FileOutputStream outputStream = new FileOutputStream(mavenDependenciesFiltersFile);
writer = new OutputStreamWriter(outputStream, StandardCharsets.ISO_8859_1);
allProperties.store(writer, null);
} catch (FileNotFoundException e) {
throw new BuildException("Can't find file: '" + mavenDependenciesFiltersFile.getPath() + "'", e);
} catch (IOException e) {
throw new BuildException("Exception writing out '" + mavenDependenciesFiltersFile.getPath() + "'", e);
} finally {
if (null != writer) {
try {
writer.close();
} catch (IOException e) {
// ignore
}
}
}
}
/**
* Visits all ivy.xml files and collects module and organisation attributes into a map.
*/
private static Map<String,String> getIvyModuleInfo(Resources ivyXmlResources,
DocumentBuilder documentBuilder, XPath xpath) {
Map<String,String> ivyInfoModuleToOrganisation = new HashMap<String,String>();
traverseIvyXmlResources(ivyXmlResources, new Consumer<File>() {
@Override
public void accept(File f) {
try {
Document document = documentBuilder.parse(f);
{
String infoPath = "/ivy-module/info";
NodeList infos = (NodeList)xpath.evaluate(infoPath, document, XPathConstants.NODESET);
for (int infoNum = 0 ; infoNum < infos.getLength() ; ++infoNum) {
Element infoElement = (Element)infos.item(infoNum);
String infoOrg = infoElement.getAttribute("organisation");
String infoOrgSuffix = infoOrg.substring(infoOrg.lastIndexOf('.')+1);
String infoModule = infoElement.getAttribute("module");
String module = infoOrgSuffix+"-"+infoModule;
ivyInfoModuleToOrganisation.put(module, infoOrg);
}
}
} catch (XPathExpressionException | IOException | SAXException e) {
throw new RuntimeException(e);
}
}
});
return ivyInfoModuleToOrganisation;
}
/**
* Collects external dependencies from each ivy.xml file and sets
* external dependency properties to be inserted into modules' POMs.
*/
private void setExternalDependencyProperties() {
traverseIvyXmlResources(ivyXmlResources, new Consumer<File>() {
@Override
public void accept(File f) {
try {
collectExternalDependenciesFromIvyXmlFile(f);
} catch (XPathExpressionException | IOException | SAXException e) {
throw new RuntimeException(e);
}
}
});
addSharedExternalDependencies();
setExternalDependencyXmlProperties();
}
private static void traverseIvyXmlResources(Resources ivyXmlResources, Consumer<File> ivyXmlFileConsumer) {
@SuppressWarnings("unchecked")
Iterator<Resource> iter = (Iterator<Resource>)ivyXmlResources.iterator();
while (iter.hasNext()) {
final Resource resource = iter.next();
if ( ! resource.isExists()) {
throw new BuildException("Resource does not exist: " + resource.getName());
}
if ( ! (resource instanceof FileResource)) {
throw new BuildException("Only filesystem resources are supported: "
+ resource.getName() + ", was: " + resource.getClass().getName());
}
File ivyXmlFile = ((FileResource)resource).getFile();
try {
ivyXmlFileConsumer.accept(ivyXmlFile);
} catch (BuildException e) {
throw e;
} catch (Exception e) {
throw new BuildException("Exception reading file " + ivyXmlFile.getPath() + ": " + e, e);
}
}
}
/**
* For each module that includes other modules' external dependencies via
* including all files under their ".../lib/" dirs in their (test.)classpath,
* add the other modules' dependencies to its set of external dependencies.
*/
private void addSharedExternalDependencies() {
// Delay adding shared compile-scope dependencies until after all have been processed,
// so dependency sharing is limited to a depth of one.
Map<String,SortedSet<ExternalDependency>> sharedDependencies = new HashMap<>();
for (Map.Entry<String, Set<String>> entry : interModuleExternalCompileScopeDependencies.entrySet()) {
TreeSet<ExternalDependency> deps = new TreeSet<>();
sharedDependencies.put(entry.getKey(), deps);
Set<String> moduleDependencies = entry.getValue();
if (null != moduleDependencies) {
for (String otherArtifactId : moduleDependencies) {
SortedSet<ExternalDependency> otherExtDeps = allExternalDependencies.get(otherArtifactId);
if (null != otherExtDeps) {
for (ExternalDependency otherDep : otherExtDeps) {
if ( ! otherDep.isTestDependency) {
deps.add(otherDep);
}
}
}
}
}
}
for (Map.Entry<String, Set<String>> entry : interModuleExternalTestScopeDependencies.entrySet()) {
String module = entry.getKey();
SortedSet<ExternalDependency> deps = sharedDependencies.get(module);
if (null == deps) {
deps = new TreeSet<>();
sharedDependencies.put(module, deps);
}
Set<String> moduleDependencies = entry.getValue();
if (null != moduleDependencies) {
for (String otherArtifactId : moduleDependencies) {
int testScopePos = otherArtifactId.indexOf(":test");
boolean isTestScope = false;
if (-1 != testScopePos) {
otherArtifactId = otherArtifactId.substring(0, testScopePos);
isTestScope = true;
}
SortedSet<ExternalDependency> otherExtDeps = allExternalDependencies.get(otherArtifactId);
if (null != otherExtDeps) {
for (ExternalDependency otherDep : otherExtDeps) {
if (otherDep.isTestDependency == isTestScope) {
if ( ! deps.contains(otherDep)
&& ( null == allExternalDependencies.get(module)
|| ! allExternalDependencies.get(module).contains(otherDep))) {
// Add test-scope clone only if it's not already a compile-scope dependency.
ExternalDependency otherDepTestScope = new ExternalDependency
(otherDep.groupId, otherDep.artifactId, otherDep.classifier, true, otherDep.isOptional);
deps.add(otherDepTestScope);
}
}
}
}
}
}
}
for (Map.Entry<String, SortedSet<ExternalDependency>> entry : sharedDependencies.entrySet()) {
String module = entry.getKey();
SortedSet<ExternalDependency> deps = allExternalDependencies.get(module);
if (null == deps) {
deps = new TreeSet<>();
allExternalDependencies.put(module, deps);
}
for (ExternalDependency dep : entry.getValue()) {
String dependencyCoordinate = dep.groupId + ":" + dep.artifactId;
if (globalOptionalExternalDependencies.contains(dependencyCoordinate)
|| (perModuleOptionalExternalDependencies.containsKey(module)
&& perModuleOptionalExternalDependencies.get(module).contains(dependencyCoordinate))) {
// make a copy of the dep and set optional=true
dep = new ExternalDependency(dep.groupId, dep.artifactId, dep.classifier, dep.isTestDependency, true);
}
deps.add(dep);
}
}
}
/**
* For each module, sets a compile-scope and a test-scope property
* with values that contain the appropriate &lt;dependency&gt;
* snippets.
*/
private void setExternalDependencyXmlProperties() {
for (String module : internalCompileScopeDependencies.keySet()) { // get full module list
StringBuilder compileScopeBuilder = new StringBuilder();
StringBuilder testScopeBuilder = new StringBuilder();
SortedSet<ExternalDependency> extDeps = allExternalDependencies.get(module);
if (null != extDeps) {
for (ExternalDependency dep : extDeps) {
StringBuilder builder = dep.isTestDependency ? testScopeBuilder : compileScopeBuilder;
appendDependencyXml(builder, dep.groupId, dep.artifactId, " ", null,
dep.isTestDependency, dep.isOptional, dep.classifier, null);
// Test POMs for solrj, solr-core, lucene-codecs and lucene-core modules
// need to include all compile-scope dependencies as test-scope dependencies
// since we've turned off transitive dependency resolution.
if ( ! dep.isTestDependency && modulesWithSeparateCompileAndTestPOMs.contains(module)) {
appendDependencyXml(testScopeBuilder, dep.groupId, dep.artifactId, " ", null,
true, dep.isOptional, dep.classifier, null);
}
}
}
if (compileScopeBuilder.length() > 0) {
compileScopeBuilder.setLength(compileScopeBuilder.length() - 1); // drop trailing newline
}
if (testScopeBuilder.length() > 0) {
testScopeBuilder.setLength(testScopeBuilder.length() - 1); // drop trailing newline
}
allProperties.setProperty(module + ".external.dependencies", compileScopeBuilder.toString());
allProperties.setProperty(module + ".external.test.dependencies", testScopeBuilder.toString());
}
}
/**
* Sets the property to be inserted into the grandparent POM's
* &lt;dependencyManagement&gt; section.
*/
private void setGrandparentDependencyManagementProperty() {
StringBuilder builder = new StringBuilder();
appendAllInternalDependencies(builder);
Map<String,String> versionsMap = new HashMap<>();
appendAllExternalDependencies(builder, versionsMap);
builder.setLength(builder.length() - 1); // drop trailing newline
allProperties.setProperty(DEPENDENCY_MANAGEMENT_PROPERTY, builder.toString());
for (Map.Entry<String,String> entry : versionsMap.entrySet()) {
allProperties.setProperty(entry.getKey(), entry.getValue());
}
}
/**
* For each artifact in the project, append a dependency with version
* ${project.version} to the grandparent POM's &lt;dependencyManagement&gt;
* section. An &lt;exclusion&gt; is added for each of the artifact's
* dependencies.
*/
private void appendAllInternalDependencies(StringBuilder builder) {
for (Map.Entry<String, SortedSet<String>> entry : internalCompileScopeDependencies.entrySet()) {
String artifactId = entry.getKey();
List<String> exclusions = new ArrayList<>(entry.getValue());
SortedSet<ExternalDependency> extDeps = allExternalDependencies.get(artifactId);
if (null != extDeps) {
for (ExternalDependency externalDependency : extDeps) {
if ( ! externalDependency.isTestDependency && ! externalDependency.isOptional) {
exclusions.add(externalDependency.groupId + ':' + externalDependency.artifactId);
}
}
}
String groupId = ivyModuleInfo.get(artifactId);
appendDependencyXml(builder, groupId, artifactId, " ", "${project.version}", false, false, null, exclusions);
}
}
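// Illustrative shape of one entry appended above (using lucene-core as an example):
//   <dependency>
//     <groupId>org.apache.lucene</groupId>
//     <artifactId>lucene-core</artifactId>
//     <version>${project.version}</version>
//     <exclusions> ... one per internal and non-test, non-optional external dependency ... </exclusions>
//   </dependency>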
/**
* Returns the Ivy cache directory, located under either the ${ivy.default.ivy.user.dir}
* property, or if that's not set, under the default ~/.ivy2/.
*/
private File getIvyCacheDir() {
String ivyUserDirName = getProject().getUserProperty(IVY_USER_DIR_PROPERTY);
if (null == ivyUserDirName) {
ivyUserDirName = getProject().getProperty(IVY_USER_DIR_PROPERTY);
if (null == ivyUserDirName) {
ivyUserDirName = System.getProperty("user.home") + System.getProperty("file.separator") + ".ivy2";
}
}
File ivyUserDir = new File(ivyUserDirName);
if ( ! ivyUserDir.exists()) {
throw new BuildException("Ivy user dir does not exist: '" + ivyUserDir.getPath() + "'");
}
File dir = new File(ivyUserDir, "cache");
if ( ! dir.exists()) {
throw new BuildException("Ivy cache dir does not exist: '" + ivyCacheDir.getPath() + "'");
}
return dir;
}
/**
* Append each dependency listed in the centralized Ivy versions file
* to the grandparent POM's &lt;dependencyManagement&gt; section.
* An &lt;exclusion&gt; is added for each of the artifact's dependencies,
* which are collected from the artifact's ivy.xml from the Ivy cache.
*
* Also add a version property for each dependency.
*/
private void appendAllExternalDependencies(StringBuilder dependenciesBuilder, Map<String,String> versionsMap) {
log("Loading centralized ivy versions from: " + centralizedVersionsFile, verboseLevel);
ivyCacheDir = getIvyCacheDir();
Properties versions = new InterpolatedProperties();
try (InputStream inputStream = new FileInputStream(centralizedVersionsFile);
Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8)) {
versions.load(reader);
} catch (IOException e) {
throw new BuildException("Exception reading centralized versions file " + centralizedVersionsFile.getPath(), e);
}
SortedSet<Map.Entry<?,?>> sortedEntries = new TreeSet<>(new Comparator<Map.Entry<?,?>>() {
@Override public int compare(Map.Entry<?,?> o1, Map.Entry<?,?> o2) {
return ((String)o1.getKey()).compareTo((String)o2.getKey());
}
});
sortedEntries.addAll(versions.entrySet());
for (Map.Entry<?,?> entry : sortedEntries) {
String key = (String)entry.getKey();
Matcher matcher = COORDINATE_KEY_PATTERN.matcher(key);
if (matcher.lookingAt()) {
String groupId = matcher.group(1);
String artifactId = matcher.group(2);
String coordinate = groupId + ':' + artifactId;
String version = (String)entry.getValue();
versionsMap.put(coordinate + ".version", version);
if ( ! nonJarDependencies.contains(coordinate)) {
Set<String> classifiers = dependencyClassifiers.get(coordinate);
if (null != classifiers) {
for (String classifier : classifiers) {
Collection<String> exclusions = getTransitiveDependenciesFromIvyCache(groupId, artifactId, version);
appendDependencyXml
(dependenciesBuilder, groupId, artifactId, " ", version, false, false, classifier, exclusions);
}
}
}
}
}
}
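// Sketch of the mapping above (coordinate and version are hypothetical): a line
//   /org.example/foo = 1.2.3
// in the centralized versions file yields the version property "org.example:foo.version" -> "1.2.3",
// and, unless the coordinate is a known non-jar dependency, one <dependency> snippet per known
// classifier, with <exclusion>s read from the artifact's ivy.xml in the Ivy cache.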
/**
* Collect transitive compile-scope dependencies for the given artifact's
* ivy.xml from the Ivy cache, using the default ivy pattern
* "[organisation]/[module]/ivy-[revision].xml". See
* <a href="http://ant.apache.org/ivy/history/latest-milestone/settings/caches.html"
* >the Ivy cache documentation</a>.
*/
private Collection<String> getTransitiveDependenciesFromIvyCache
(String groupId, String artifactId, String version) {
SortedSet<String> transitiveDependencies = new TreeSet<>();
// E.g. ~/.ivy2/cache/xerces/xercesImpl/ivy-2.9.1.xml
File ivyXmlFile = new File(new File(new File(ivyCacheDir, groupId), artifactId), "ivy-" + version + ".xml");
if ( ! ivyXmlFile.exists()) {
throw new BuildException("File not found: " + ivyXmlFile.getPath());
}
try {
Document document = documentBuilder.parse(ivyXmlFile);
String dependencyPath = "/ivy-module/dependencies/dependency"
+ "[ not(starts-with(@conf,'test->'))"
+ "and not(starts-with(@conf,'provided->'))"
+ "and not(starts-with(@conf,'optional->'))]";
NodeList dependencies = (NodeList)xpath.evaluate(dependencyPath, document, XPathConstants.NODESET);
for (int i = 0 ; i < dependencies.getLength() ; ++i) {
Element dependency = (Element)dependencies.item(i);
transitiveDependencies.add(dependency.getAttribute("org") + ':' + dependency.getAttribute("name"));
}
} catch (Exception e) {
throw new BuildException( "Exception collecting transitive dependencies for "
+ groupId + ':' + artifactId + ':' + version + " from "
+ ivyXmlFile.getAbsolutePath(), e);
}
return transitiveDependencies;
}
/**
* Sets the internal dependencies compile and test properties to be inserted
* into modules' POMs.
*
* Also collects shared external dependencies,
* e.g. solr-core wants all of solrj's external dependencies.
*/
private void setInternalDependencyProperties() {
log("Loading module dependencies from: " + moduleDependenciesPropertiesFile, verboseLevel);
Properties moduleDependencies = new Properties();
try (InputStream inputStream = new FileInputStream(moduleDependenciesPropertiesFile);
Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8)) {
moduleDependencies.load(reader);
} catch (FileNotFoundException e) {
throw new BuildException("Properties file does not exist: " + moduleDependenciesPropertiesFile.getPath());
} catch (IOException e) {
throw new BuildException("Exception reading properties file " + moduleDependenciesPropertiesFile.getPath(), e);
}
Map<String,SortedSet<String>> testScopeDependencies = new HashMap<>();
Map<String, String> testScopePropertyKeys = new HashMap<>();
for (Map.Entry<?,?> entry : moduleDependencies.entrySet()) {
String newPropertyKey = (String)entry.getKey();
StringBuilder newPropertyValue = new StringBuilder();
String value = (String)entry.getValue();
Matcher matcher = MODULE_DEPENDENCIES_COORDINATE_KEY_PATTERN.matcher(newPropertyKey);
if ( ! matcher.matches()) {
throw new BuildException("Malformed module dependencies property key: '" + newPropertyKey + "'");
}
String antProjectName = matcher.group(1);
boolean isTest = null != matcher.group(2);
String artifactName = antProjectToArtifactName(antProjectName);
newPropertyKey = artifactName + (isTest ? ".internal.test" : ".internal") + ".dependencies"; // Add ".internal"
if (isTest) {
testScopePropertyKeys.put(artifactName, newPropertyKey);
}
if (null == value || value.isEmpty()) {
allProperties.setProperty(newPropertyKey, "");
Map<String,SortedSet<String>> scopedDependencies
= isTest ? testScopeDependencies : internalCompileScopeDependencies;
scopedDependencies.put(artifactName, new TreeSet<String>());
} else {
// Lucene analysis modules' build dirs do not include hyphens, but Solr contribs' build dirs do
String origModuleDir = antProjectName.replace("analyzers-", "analysis/");
// Exclude the module's own build output, in addition to UNWANTED_INTERNAL_DEPENDENCIES
Pattern unwantedInternalDependencies = Pattern.compile
("(?:lucene/build/|solr/build/(?:contrib/)?)" + origModuleDir + "/" // require dir separator
+ "|" + UNWANTED_INTERNAL_DEPENDENCIES);
SortedSet<String> sortedDeps = new TreeSet<>();
for (String dependency : value.split(",")) {
matcher = SHARED_EXTERNAL_DEPENDENCIES_PATTERN.matcher(dependency);
if (matcher.find()) {
String otherArtifactName = matcher.group(1);
boolean isTestScope = null != matcher.group(2) && matcher.group(2).length() > 0;
otherArtifactName = otherArtifactName.replace('/', '-');
otherArtifactName = otherArtifactName.replace("lucene-analysis", "lucene-analyzers");
otherArtifactName = otherArtifactName.replace("solr-contrib-solr-", "solr-");
otherArtifactName = otherArtifactName.replace("solr-contrib-", "solr-");
if ( ! otherArtifactName.equals(artifactName)) {
Map<String,Set<String>> sharedDeps
= isTest ? interModuleExternalTestScopeDependencies : interModuleExternalCompileScopeDependencies;
Set<String> sharedSet = sharedDeps.get(artifactName);
if (null == sharedSet) {
sharedSet = new HashSet<>();
sharedDeps.put(artifactName, sharedSet);
}
if (isTestScope) {
otherArtifactName += ":test";
}
sharedSet.add(otherArtifactName);
}
}
matcher = unwantedInternalDependencies.matcher(dependency);
if (matcher.find()) {
continue; // skip external (/(test-)lib/), and non-jar and unwanted (self) internal deps
}
String artifactId = dependencyToArtifactId(newPropertyKey, dependency);
String groupId = ivyModuleInfo.get(artifactId);
String coordinate = groupId + ':' + artifactId;
sortedDeps.add(coordinate);
}
if (isTest) { // Don't set test-scope properties until all compile-scope deps have been seen
testScopeDependencies.put(artifactName, sortedDeps);
} else {
internalCompileScopeDependencies.put(artifactName, sortedDeps);
for (String dependency : sortedDeps) {
int splitPos = dependency.indexOf(':');
String groupId = dependency.substring(0, splitPos);
String artifactId = dependency.substring(splitPos + 1);
appendDependencyXml(newPropertyValue, groupId, artifactId, " ", null, false, false, null, null);
}
if (newPropertyValue.length() > 0) {
newPropertyValue.setLength(newPropertyValue.length() - 1); // drop trailing newline
}
allProperties.setProperty(newPropertyKey, newPropertyValue.toString());
}
}
}
// Now that all compile-scope dependencies have been seen, include only those test-scope
// dependencies that are not also compile-scope dependencies.
for (Map.Entry<String,SortedSet<String>> entry : testScopeDependencies.entrySet()) {
String module = entry.getKey();
SortedSet<String> testDeps = entry.getValue();
SortedSet<String> compileDeps = internalCompileScopeDependencies.get(module);
if (null == compileDeps) {
throw new BuildException("Can't find compile scope dependencies for module " + module);
}
StringBuilder newPropertyValue = new StringBuilder();
for (String dependency : testDeps) {
// modules with separate compile-scope and test-scope POMs need their compile-scope deps
// included in their test-scope deps.
if (modulesWithSeparateCompileAndTestPOMs.contains(module) || ! compileDeps.contains(dependency)) {
int splitPos = dependency.indexOf(':');
String groupId = dependency.substring(0, splitPos);
String artifactId = dependency.substring(splitPos + 1);
appendDependencyXml(newPropertyValue, groupId, artifactId, " ", null, true, false, null, null);
}
}
if (newPropertyValue.length() > 0) {
newPropertyValue.setLength(newPropertyValue.length() - 1); // drop trailing newline
}
allProperties.setProperty(testScopePropertyKeys.get(module), newPropertyValue.toString());
}
}
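// Sketch of the property naming above: the Ant project "analyzers-common" maps to the artifact
// "lucene-analyzers-common", and its internal dependencies end up in the POM filter properties
// "lucene-analyzers-common.internal.dependencies" and
// "lucene-analyzers-common.internal.test.dependencies".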
/**
* Converts either a compile output directory or an internal jar
* dependency, taken from an Ant (test.)classpath, into an artifactId
*/
private String dependencyToArtifactId(String newPropertyKey, String dependency) {
StringBuilder artifactId = new StringBuilder();
Matcher matcher = COMPILATION_OUTPUT_DIRECTORY_PATTERN.matcher(dependency);
if (matcher.matches()) {
// Pattern.compile("(lucene|solr)/build/(.*)/classes/java");
String artifact = matcher.group(2);
artifact = artifact.replace('/', '-');
artifact = artifact.replaceAll("(?<!solr-)analysis-", "analyzers-");
if ("lucene".equals(matcher.group(1))) {
artifactId.append("lucene-");
}
artifactId.append(artifact);
} else {
matcher = internalJarPattern.matcher(dependency);
if (matcher.matches()) {
// internalJarPattern is /.*(lucene|solr)([^/]*?)-<version>\.jar/,
// where <version> is the value of the Ant "version" property
artifactId.append(matcher.group(1));
artifactId.append(matcher.group(2));
} else {
throw new BuildException
("Malformed module dependency from '" + newPropertyKey + "': '" + dependency + "'");
}
}
return artifactId.toString();
}
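// Examples of the conversion above (paths are illustrative, <version> stands for the Ant version property):
//   "lucene/build/analysis/common/classes/java" -> "lucene-analyzers-common"
//   "solr/build/solr-core/classes/java"         -> "solr-core"
//   ".../lucene-core-<version>.jar"             -> "lucene-core"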
/**
* Convert Ant project names to artifact names: prepend "lucene-"
* to Lucene project names
*/
private String antProjectToArtifactName(String origModule) {
String module = origModule;
if ( ! origModule.startsWith("solr-")) { // lucene modules names don't have "lucene-" prepended
module = "lucene-" + module;
}
return module;
}
/**
* Collect external dependencies from the given ivy.xml file, constructing
* property values containing &lt;dependency&gt; snippets, which will be
* filtered (substituted) when copying the POM for the module corresponding
* to the given ivy.xml file.
*/
private void collectExternalDependenciesFromIvyXmlFile(File ivyXmlFile)
throws XPathExpressionException, IOException, SAXException {
String module = getModuleName(ivyXmlFile);
log("Collecting external dependencies from: " + ivyXmlFile.getPath(), verboseLevel);
Document document = documentBuilder.parse(ivyXmlFile);
// Exclude the 'start' configuration in solr/server/ivy.xml
String dependencyPath = "/ivy-module/dependencies/dependency[not(starts-with(@conf,'start'))]";
NodeList dependencies = (NodeList)xpath.evaluate(dependencyPath, document, XPathConstants.NODESET);
for (int depNum = 0 ; depNum < dependencies.getLength() ; ++depNum) {
Element dependency = (Element)dependencies.item(depNum);
String groupId = dependency.getAttribute("org");
String artifactId = dependency.getAttribute("name");
String dependencyCoordinate = groupId + ':' + artifactId;
Set<String> classifiers = dependencyClassifiers.get(dependencyCoordinate);
if (null == classifiers) {
classifiers = new HashSet<>();
dependencyClassifiers.put(dependencyCoordinate, classifiers);
}
String conf = dependency.getAttribute("conf");
boolean confContainsTest = conf.contains("test");
boolean isOptional = globalOptionalExternalDependencies.contains(dependencyCoordinate)
|| ( perModuleOptionalExternalDependencies.containsKey(module)
&& perModuleOptionalExternalDependencies.get(module).contains(dependencyCoordinate));
SortedSet<ExternalDependency> deps = allExternalDependencies.get(module);
if (null == deps) {
deps = new TreeSet<>();
allExternalDependencies.put(module, deps);
}
NodeList artifacts = null;
if (dependency.hasChildNodes()) {
artifacts = (NodeList)xpath.evaluate("artifact", dependency, XPathConstants.NODESET);
}
if (null != artifacts && artifacts.getLength() > 0) {
for (int artifactNum = 0 ; artifactNum < artifacts.getLength() ; ++artifactNum) {
Element artifact = (Element)artifacts.item(artifactNum);
String type = artifact.getAttribute("type");
String ext = artifact.getAttribute("ext");
// When conf contains BOTH "test" and "compile", and type != "test", this is NOT a test dependency
boolean isTestDependency = confContainsTest && (type.equals("test") || ! conf.contains("compile"));
if ((type.isEmpty() && ext.isEmpty()) || type.equals("jar") || ext.equals("jar")) {
String classifier = artifact.getAttribute("maven:classifier");
if (classifier.isEmpty()) {
classifier = null;
}
classifiers.add(classifier);
deps.add(new ExternalDependency(groupId, artifactId, classifier, isTestDependency, isOptional));
} else { // not a jar
nonJarDependencies.add(dependencyCoordinate);
}
}
} else {
classifiers.add(null);
deps.add(new ExternalDependency(groupId, artifactId, null, confContainsTest, isOptional));
}
}
}
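// Rough example of the classification above (coordinates are hypothetical):
//   <dependency org="org.example" name="foo" rev="${/org.example/foo}" conf="test->default"/>
// is recorded as a test-scope ExternalDependency; when a dependency declares nested <artifact>
// elements and its conf names both "compile" and "test", only artifacts with type="test" are
// treated as test-scope (see the comment above).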
/**
* Stores information about an external dependency
*/
private static class ExternalDependency implements Comparable<ExternalDependency> {
String groupId;
String artifactId;
boolean isTestDependency;
boolean isOptional;
String classifier;
public ExternalDependency
(String groupId, String artifactId, String classifier, boolean isTestDependency, boolean isOptional) {
this.groupId = groupId;
this.artifactId = artifactId;
this.classifier = classifier;
this.isTestDependency = isTestDependency;
this.isOptional = isOptional;
}
@Override
public boolean equals(Object o) {
if ( ! (o instanceof ExternalDependency)) {
return false;
}
ExternalDependency other = (ExternalDependency)o;
return groupId.equals(other.groupId)
&& artifactId.equals(other.artifactId)
&& isTestDependency == other.isTestDependency
&& isOptional == other.isOptional
&& classifier.equals(other.classifier);
}
@Override
public int hashCode() {
return groupId.hashCode() * 31
+ artifactId.hashCode() * 31
+ (isTestDependency ? 31 : 0)
+ (isOptional ? 31 : 0)
+ (null == classifier ? 0 : classifier.hashCode()); // classifier may be null
}
@Override
public int compareTo(ExternalDependency other) {
int comparison = groupId.compareTo(other.groupId);
if (0 != comparison) {
return comparison;
}
comparison = artifactId.compareTo(other.artifactId);
if (0 != comparison) {
return comparison;
}
if (null == classifier) {
if (null != other.classifier) {
return -1;
}
} else if (null == other.classifier) { // classifier is not null
return 1;
} else { // neither classifier is null
if (0 != (comparison = classifier.compareTo(other.classifier))) {
return comparison;
}
}
// test and optional don't matter in this sort
return 0;
}
}
/**
* Extract module name from ivy.xml path.
*/
private String getModuleName(File ivyXmlFile) {
String path = ivyXmlFile.getAbsolutePath();
Matcher matcher = PROPERTY_PREFIX_FROM_IVY_XML_FILE_PATTERN.matcher(path);
if ( ! matcher.find()) {
throw new BuildException("Can't get module name from ivy.xml path: " + path);
}
StringBuilder builder = new StringBuilder();
builder.append(matcher.group(1));
if (null != matcher.group(2)) { // "lucene/analysis/..."
builder.append("-analyzers");
} else if (null != matcher.group(3)) { // "solr/example/..."
builder.append("-example");
} else if (null != matcher.group(4)) { // "solr/server/..."
builder.append("-server");
}
builder.append('-');
builder.append(matcher.group(5));
return builder.toString().replace("solr-solr-", "solr-");
}
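// The pattern itself is defined earlier in the file; illustratively, an ivy.xml under
// lucene/analysis/common/ presumably maps to "lucene-analyzers-common", solr/core/ to
// "solr-core", and solr/solrj/ to "solr-solrj".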
/**
* Appends a &lt;dependency&gt; snippet to the given builder.
*/
private void appendDependencyXml(StringBuilder builder, String groupId, String artifactId,
String indent, String version, boolean isTestDependency,
boolean isOptional, String classifier, Collection<String> exclusions) {
builder.append(indent).append("<dependency>\n");
builder.append(indent).append(" <groupId>").append(groupId).append("</groupId>\n");
builder.append(indent).append(" <artifactId>").append(artifactId).append("</artifactId>\n");
if (null != version) {
builder.append(indent).append(" <version>").append(version).append("</version>\n");
}
if (isTestDependency) {
builder.append(indent).append(" <scope>test</scope>\n");
}
if (isOptional) {
builder.append(indent).append(" <optional>true</optional>\n");
}
if (null != classifier) {
builder.append(indent).append(" <classifier>").append(classifier).append("</classifier>\n");
}
if ( ! modulesWithTransitiveDependencies.contains(artifactId) && null != exclusions && ! exclusions.isEmpty()) {
builder.append(indent).append(" <exclusions>\n");
for (String dependency : exclusions) {
int splitPos = dependency.indexOf(':');
String excludedGroupId = dependency.substring(0, splitPos);
String excludedArtifactId = dependency.substring(splitPos + 1);
builder.append(indent).append(" <exclusion>\n");
builder.append(indent).append(" <groupId>").append(excludedGroupId).append("</groupId>\n");
builder.append(indent).append(" <artifactId>").append(excludedArtifactId).append("</artifactId>\n");
builder.append(indent).append(" </exclusion>\n");
}
builder.append(indent).append(" </exclusions>\n");
}
builder.append(indent).append("</dependency>\n");
}
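// The snippet produced above has roughly this shape (values are placeholders):
//   <dependency>
//     <groupId>org.example</groupId>
//     <artifactId>foo</artifactId>
//     <version>1.2.3</version>          (only when a version is given)
//     <scope>test</scope>               (only for test dependencies)
//     <optional>true</optional>         (only for optional dependencies)
//     <classifier>tests</classifier>    (only when a classifier is given)
//     <exclusions>...</exclusions>      (only when exclusions are given and the artifact
//                                        is not in modulesWithTransitiveDependencies)
//   </dependency>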
}

View File

@ -1,162 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.dependencies;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* Parse a properties file, performing recursive Ant-like
* property value interpolation, and return the resulting Properties.
*/
public class InterpolatedProperties extends Properties {
private static final Pattern PROPERTY_REFERENCE_PATTERN = Pattern.compile("\\$\\{(?<name>[^}]+)\\}");
/**
* Loads the properties file via {@link Properties#load(InputStream)},
* then performs recursive Ant-like property value interpolation.
*/
@Override
public void load(InputStream inStream) throws IOException {
throw new UnsupportedOperationException("InterpolatedProperties.load(InputStream) is not supported.");
}
/**
* Loads the properties file via {@link Properties#load(Reader)},
* then performs recursive Ant-like property value interpolation.
*/
@Override
public void load(Reader reader) throws IOException {
Properties p = new Properties();
p.load(reader);
LinkedHashMap<String, String> props = new LinkedHashMap<>();
Enumeration<?> e = p.propertyNames();
while (e.hasMoreElements()) {
String key = (String) e.nextElement();
props.put(key, p.getProperty(key));
}
resolve(props).forEach((k, v) -> this.setProperty(k, v));
}
private static Map<String,String> resolve(Map<String,String> props) {
LinkedHashMap<String, String> resolved = new LinkedHashMap<>();
HashSet<String> recursive = new HashSet<>();
props.forEach((k, v) -> {
resolve(props, resolved, recursive, k, v);
});
return resolved;
}
private static String resolve(Map<String,String> props,
LinkedHashMap<String, String> resolved,
Set<String> recursive,
String key,
String value) {
if (value == null) {
throw new IllegalArgumentException("Missing replaced property key: " + key);
}
if (recursive.contains(key)) {
throw new IllegalArgumentException("Circular recursive property resolution: " + recursive);
}
if (!resolved.containsKey(key)) {
recursive.add(key);
StringBuffer buffer = new StringBuffer();
Matcher matcher = PROPERTY_REFERENCE_PATTERN.matcher(value);
while (matcher.find()) {
String referenced = matcher.group("name");
String concrete = resolve(props, resolved, recursive, referenced, props.get(referenced));
matcher.appendReplacement(buffer, Matcher.quoteReplacement(concrete));
}
matcher.appendTail(buffer);
resolved.put(key, buffer.toString());
recursive.remove(key);
}
assert resolved.containsKey(key);
return resolved.get(key);
}
public static void main(String [] args) {
{
Map<String, String> props = new LinkedHashMap<>();
props.put("a", "${b}");
props.put("b", "${c}");
props.put("c", "foo");
props.put("d", "${a}/${b}/${c}");
assertEquals(resolve(props), "a=foo", "b=foo", "c=foo", "d=foo/foo/foo");
}
{
Map<String, String> props = new LinkedHashMap<>();
props.put("a", "foo");
props.put("b", "${a}");
assertEquals(resolve(props), "a=foo", "b=foo");
}
{
Map<String, String> props = new LinkedHashMap<>();
props.put("a", "${b}");
props.put("b", "${c}");
props.put("c", "${a}");
try {
resolve(props);
} catch (IllegalArgumentException e) {
// Expected, circular reference.
if (!e.getMessage().contains("Circular recursive")) {
throw new AssertionError();
}
}
}
{
Map<String, String> props = new LinkedHashMap<>();
props.put("a", "${b}");
try {
resolve(props);
} catch (IllegalArgumentException e) {
// Expected, no referenced value.
if (!e.getMessage().contains("Missing replaced")) {
throw new AssertionError();
}
}
}
}
private static void assertEquals(Map<String,String> resolved, String... keyValuePairs) {
List<String> result = resolved.entrySet().stream().sorted((a, b) -> a.getKey().compareTo(b.getKey()))
.map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.toList());
if (!result.equals(Arrays.asList(keyValuePairs))) {
throw new AssertionError("Mismatch: \n" + result + "\nExpected: " + Arrays.asList(keyValuePairs));
}
}
}

View File

@ -1,903 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.validation;
import org.apache.ivy.Ivy;
import org.apache.ivy.core.LogOptions;
import org.apache.ivy.core.report.ResolveReport;
import org.apache.ivy.core.resolve.ResolveOptions;
import org.apache.ivy.core.settings.IvySettings;
import org.apache.ivy.plugins.conflict.NoConflictManager;
import org.apache.lucene.dependencies.InterpolatedProperties;
import org.apache.lucene.validation.ivyde.IvyNodeElement;
import org.apache.lucene.validation.ivyde.IvyNodeElementAdapter;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.types.resources.Resources;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.XMLConstants;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* An Ant task to verify that the '/org/name' keys in ivy-versions.properties
* are sorted lexically and are neither duplicates nor orphans, and that all
* dependencies in all ivy.xml files use rev="${/org/name}" format.
*/
public class LibVersionsCheckTask extends Task {
private static final String IVY_XML_FILENAME = "ivy.xml";
private static final Pattern COORDINATE_KEY_PATTERN = Pattern.compile("(/([^/ \t\f]+)/([^=:/ \t\f]+))");
private static final Pattern BLANK_OR_COMMENT_LINE_PATTERN = Pattern.compile("[ \t\f]*(?:[#!].*)?");
private static final Pattern TRAILING_BACKSLASH_PATTERN = Pattern.compile("[^\\\\]*(\\\\+)$");
private static final Pattern LEADING_WHITESPACE_PATTERN = Pattern.compile("[ \t\f]+(.*)");
private static final Pattern WHITESPACE_GOODSTUFF_WHITESPACE_BACKSLASH_PATTERN
= Pattern.compile("[ \t\f]*(.*?)(?:(?<!\\\\)[ \t\f]*)?\\\\");
private static final Pattern TRAILING_WHITESPACE_BACKSLASH_PATTERN
= Pattern.compile("(.*?)(?:(?<!\\\\)[ \t\f]*)?\\\\");
private static final Pattern MODULE_NAME_PATTERN = Pattern.compile("\\smodule\\s*=\\s*[\"']([^\"']+)[\"']");
private static final Pattern MODULE_DIRECTORY_PATTERN
= Pattern.compile(".*[/\\\\]((?:lucene|solr)[/\\\\].*)[/\\\\].*");
private static final SAXParserFactory SAX_PARSER_FACTORY = SAXParserFactory.newDefaultInstance();
static {
try {
SAX_PARSER_FACTORY.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
} catch (SAXNotRecognizedException | SAXNotSupportedException | ParserConfigurationException e) {
throw new Error(e);
}
}
private Ivy ivy;
/**
* All ivy.xml files to check.
*/
private Resources ivyXmlResources = new Resources();
/**
* Centralized Ivy versions properties file: ivy-versions.properties
*/
private File centralizedVersionsFile;
/**
* Centralized Ivy ignore conflicts file: ivy-ignore-conflicts.properties
*/
private File ignoreConflictsFile;
/**
* Ivy settings file: top-level-ivy-settings.xml
*/
private File topLevelIvySettingsFile;
/**
* Location of common build dir: lucene/build/
*/
private File commonBuildDir;
/**
* Location of ivy cache resolution directory.
*/
private File ivyResolutionCacheDir;
/**
* Artifact lock strategy that Ivy should use.
*/
private String ivyLockStrategy;
/**
* A logging level associated with verbose logging.
*/
private int verboseLevel = Project.MSG_VERBOSE;
/**
* All /org/name keys found in ivy-versions.properties,
* mapped to info about direct dependence and what would
* be conflicting indirect dependencies if Lucene/Solr
* were to use transitive dependencies.
*/
private Map<String,Dependency> directDependencies = new LinkedHashMap<>();
/**
* All /org/name keys found in ivy-ignore-conflicts.properties,
* mapped to the set of indirect dependency versions that will
* be ignored, i.e. not trigger a conflict.
*/
private Map<String,HashSet<String>> ignoreConflictVersions = new HashMap<>();
private static class Dependency {
String org;
String name;
String directVersion;
String latestVersion;
boolean directlyReferenced = false;
LinkedHashMap<IvyNodeElement,Set<String>> conflictLocations = new LinkedHashMap<>(); // dependency path -> moduleNames
Dependency(String org, String name, String directVersion) {
this.org = org;
this.name = name;
this.directVersion = directVersion;
}
}
/**
* Adds a set of ivy.xml resources to check.
*/
public void add(ResourceCollection rc) {
ivyXmlResources.add(rc);
}
public void setVerbose(boolean verbose) {
verboseLevel = (verbose ? Project.MSG_INFO : Project.MSG_VERBOSE);
}
public void setCentralizedVersionsFile(File file) {
centralizedVersionsFile = file;
}
public void setTopLevelIvySettingsFile(File file) {
topLevelIvySettingsFile = file;
}
public void setIvyResolutionCacheDir(File dir) {
ivyResolutionCacheDir = dir;
}
public void setIvyLockStrategy(String strategy) {
this.ivyLockStrategy = strategy;
}
public void setCommonBuildDir(File file) {
commonBuildDir = file;
}
public void setIgnoreConflictsFile(File file) {
ignoreConflictsFile = file;
}
/**
* Execute the task.
*/
@Override
public void execute() throws BuildException {
log("Starting scan.", verboseLevel);
long start = System.currentTimeMillis();
setupIvy();
int numErrors = 0;
if ( ! verifySortedCoordinatesPropertiesFile(centralizedVersionsFile)) {
++numErrors;
}
if ( ! verifySortedCoordinatesPropertiesFile(ignoreConflictsFile)) {
++numErrors;
}
collectDirectDependencies();
if ( ! collectVersionConflictsToIgnore()) {
++numErrors;
}
int numChecked = 0;
@SuppressWarnings("unchecked")
Iterator<Resource> iter = (Iterator<Resource>)ivyXmlResources.iterator();
while (iter.hasNext()) {
final Resource resource = iter.next();
if ( ! resource.isExists()) {
throw new BuildException("Resource does not exist: " + resource.getName());
}
if ( ! (resource instanceof FileResource)) {
throw new BuildException("Only filesystem resources are supported: "
+ resource.getName() + ", was: " + resource.getClass().getName());
}
File ivyXmlFile = ((FileResource)resource).getFile();
try {
if ( ! checkIvyXmlFile(ivyXmlFile)) {
++numErrors;
}
if ( ! resolveTransitively(ivyXmlFile)) {
++numErrors;
}
if ( ! findLatestConflictVersions()) {
++numErrors;
}
} catch (Exception e) {
throw new BuildException("Exception reading file " + ivyXmlFile.getPath() + " - " + e.toString(), e);
}
++numChecked;
}
log("Checking for orphans in " + centralizedVersionsFile.getName(), verboseLevel);
for (Map.Entry<String,Dependency> entry : directDependencies.entrySet()) {
String coordinateKey = entry.getKey();
if ( ! entry.getValue().directlyReferenced) {
log("ORPHAN coordinate key '" + coordinateKey + "' in " + centralizedVersionsFile.getName()
+ " is not found in any " + IVY_XML_FILENAME + " file.",
Project.MSG_ERR);
++numErrors;
}
}
int numConflicts = emitConflicts();
int messageLevel = numErrors > 0 ? Project.MSG_ERR : Project.MSG_INFO;
log("Checked that " + centralizedVersionsFile.getName() + " and " + ignoreConflictsFile.getName()
+ " have lexically sorted '/org/name' keys and no duplicates or orphans.",
messageLevel);
log("Scanned " + numChecked + " " + IVY_XML_FILENAME + " files for rev=\"${/org/name}\" format.",
messageLevel);
log("Found " + numConflicts + " indirect dependency version conflicts.");
log(String.format(Locale.ROOT, "Completed in %.2fs., %d error(s).",
(System.currentTimeMillis() - start) / 1000.0, numErrors),
messageLevel);
if (numConflicts > 0 || numErrors > 0) {
throw new BuildException("Lib versions check failed. Check the logs.");
}
}
private boolean findLatestConflictVersions() {
boolean success = true;
StringBuilder latestIvyXml = new StringBuilder();
latestIvyXml.append("<ivy-module version=\"2.0\">\n");
latestIvyXml.append(" <info organisation=\"org.apache.lucene\" module=\"core-tools-find-latest-revision\"/>\n");
latestIvyXml.append(" <configurations>\n");
latestIvyXml.append(" <conf name=\"default\" transitive=\"false\"/>\n");
latestIvyXml.append(" </configurations>\n");
latestIvyXml.append(" <dependencies>\n");
for (Map.Entry<String, Dependency> directDependency : directDependencies.entrySet()) {
Dependency dependency = directDependency.getValue();
if (dependency.conflictLocations.entrySet().isEmpty()) {
continue;
}
latestIvyXml.append(" <dependency org=\"");
latestIvyXml.append(dependency.org);
latestIvyXml.append("\" name=\"");
latestIvyXml.append(dependency.name);
latestIvyXml.append("\" rev=\"latest.release\" conf=\"default->*\"/>\n");
}
latestIvyXml.append(" </dependencies>\n");
latestIvyXml.append("</ivy-module>\n");
File buildDir = new File(commonBuildDir, "ivy-transitive-resolve");
if ( ! buildDir.exists() && ! buildDir.mkdirs()) {
throw new BuildException("Could not create temp directory " + buildDir.getPath());
}
File findLatestIvyXmlFile = new File(buildDir, "find.latest.conflicts.ivy.xml");
try {
try (Writer writer = new OutputStreamWriter(new FileOutputStream(findLatestIvyXmlFile), StandardCharsets.UTF_8)) {
writer.write(latestIvyXml.toString());
}
ResolveOptions options = new ResolveOptions();
options.setDownload(false); // Download only module descriptors, not artifacts
options.setTransitive(false); // Resolve only direct dependencies
options.setUseCacheOnly(false); // Download the internet!
options.setOutputReport(false); // Don't print to the console
options.setLog(LogOptions.LOG_QUIET); // Don't log to the console
options.setConfs(new String[] {"*"}); // Resolve all configurations
ResolveReport resolveReport = ivy.resolve(findLatestIvyXmlFile.toURI().toURL(), options);
IvyNodeElement root = IvyNodeElementAdapter.adapt(resolveReport);
for (IvyNodeElement element : root.getDependencies()) {
String coordinate = "/" + element.getOrganization() + "/" + element.getName();
Dependency dependency = directDependencies.get(coordinate);
if (null == dependency) {
log("ERROR: the following coordinate key does not appear in "
+ centralizedVersionsFile.getName() + ": " + coordinate, Project.MSG_ERR);
success = false;
} else {
dependency.latestVersion = element.getRevision();
}
}
} catch (IOException e) {
log("Exception writing to " + findLatestIvyXmlFile.getPath() + ": " + e.toString(), Project.MSG_ERR);
success = false;
} catch (ParseException e) {
log("Exception parsing filename " + findLatestIvyXmlFile.getPath() + ": " + e.toString(), Project.MSG_ERR);
success = false;
}
return success;
}
/**
* Collects indirect dependency version conflicts to ignore
* in ivy-ignore-conflicts.properties, and also checks for orphans
* (coordinates not included in ivy-versions.properties).
*
* Returns true if no orphans are found.
*/
private boolean collectVersionConflictsToIgnore() {
log("Checking for orphans in " + ignoreConflictsFile.getName(), verboseLevel);
boolean orphansFound = false;
InterpolatedProperties properties = new InterpolatedProperties();
try (InputStream inputStream = new FileInputStream(ignoreConflictsFile);
Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8)) {
properties.load(reader);
} catch (IOException e) {
throw new BuildException("Exception reading " + ignoreConflictsFile + ": " + e.toString(), e);
}
for (Object obj : properties.keySet()) {
String coordinate = (String)obj;
if (COORDINATE_KEY_PATTERN.matcher(coordinate).matches()) {
if ( ! directDependencies.containsKey(coordinate)) {
orphansFound = true;
log("ORPHAN coordinate key '" + coordinate + "' in " + ignoreConflictsFile.getName()
+ " is not found in " + centralizedVersionsFile.getName(),
Project.MSG_ERR);
} else {
String versionsToIgnore = properties.getProperty(coordinate);
List<String> ignore = Arrays.asList(versionsToIgnore.trim().split("\\s*,\\s*|\\s+"));
ignoreConflictVersions.put(coordinate, new HashSet<>(ignore));
}
}
}
return ! orphansFound;
}
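// Illustrative ivy-ignore-conflicts.properties entry (coordinate and versions are hypothetical):
//   /org.example/foo = 2.0, 2.1
// means that indirect dependencies on org.example:foo at versions 2.0 or 2.1 will not be
// reported as conflicts with the direct version pinned in ivy-versions.properties.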
private void collectDirectDependencies() {
InterpolatedProperties properties = new InterpolatedProperties();
try (InputStream inputStream = new FileInputStream(centralizedVersionsFile);
Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8)) {
properties.load(reader);
} catch (IOException e) {
throw new BuildException("Exception reading " + centralizedVersionsFile + ": " + e.toString(), e);
}
for (Object obj : properties.keySet()) {
String coordinate = (String)obj;
Matcher matcher = COORDINATE_KEY_PATTERN.matcher(coordinate);
if (matcher.matches()) {
String org = matcher.group(2);
String name = matcher.group(3);
String directVersion = properties.getProperty(coordinate);
Dependency dependency = new Dependency(org, name, directVersion);
directDependencies.put(coordinate, dependency);
}
}
}
/**
* Transitively resolves all dependencies in the given ivy.xml file,
* looking for indirect dependencies with versions that conflict
* with those of direct dependencies. A dependency conflict occurs when a
* direct dependency's version is older than that of an indirect
* dependency with the same /org/name.
*
* Returns true if no version conflicts are found and no resolution
* errors occurred, false otherwise.
*/
private boolean resolveTransitively(File ivyXmlFile) {
boolean success = true;
ResolveOptions options = new ResolveOptions();
options.setDownload(false); // Download only module descriptors, not artifacts
options.setTransitive(true); // Resolve transitively, if not already specified in the ivy.xml file
options.setUseCacheOnly(false); // Download the internet!
options.setOutputReport(false); // Don't print to the console
options.setLog(LogOptions.LOG_QUIET); // Don't log to the console
options.setConfs(new String[] {"*"}); // Resolve all configurations
// Rewrite the ivy.xml, replacing all 'transitive="false"' with 'transitive="true"'
// The Ivy API is file-based, so we have to write the result to the filesystem.
String moduleName = "unknown";
String ivyXmlContent = xmlToString(ivyXmlFile);
Matcher matcher = MODULE_NAME_PATTERN.matcher(ivyXmlContent);
if (matcher.find()) {
moduleName = matcher.group(1);
}
ivyXmlContent = ivyXmlContent.replaceAll("\\btransitive\\s*=\\s*[\"']false[\"']", "transitive=\"true\"");
File transitiveIvyXmlFile = null;
try {
File buildDir = new File(commonBuildDir, "ivy-transitive-resolve");
if ( ! buildDir.exists() && ! buildDir.mkdirs()) {
throw new BuildException("Could not create temp directory " + buildDir.getPath());
}
matcher = MODULE_DIRECTORY_PATTERN.matcher(ivyXmlFile.getCanonicalPath());
if ( ! matcher.matches()) {
throw new BuildException("Unknown ivy.xml module directory: " + ivyXmlFile.getCanonicalPath());
}
String moduleDirPrefix = matcher.group(1).replaceAll("[/\\\\]", ".");
transitiveIvyXmlFile = new File(buildDir, "transitive." + moduleDirPrefix + ".ivy.xml");
try (Writer writer = new OutputStreamWriter(new FileOutputStream(transitiveIvyXmlFile), StandardCharsets.UTF_8)) {
writer.write(ivyXmlContent);
}
ResolveReport resolveReport = ivy.resolve(transitiveIvyXmlFile.toURI().toURL(), options);
IvyNodeElement root = IvyNodeElementAdapter.adapt(resolveReport);
for (IvyNodeElement directDependency : root.getDependencies()) {
String coordinate = "/" + directDependency.getOrganization() + "/" + directDependency.getName();
Dependency dependency = directDependencies.get(coordinate);
if (null == dependency) {
log("ERROR: the following coordinate key does not appear in "
+ centralizedVersionsFile.getName() + ": " + coordinate);
success = false;
} else {
dependency.directlyReferenced = true;
if (collectConflicts(directDependency, directDependency, moduleName)) {
success = false;
}
}
}
} catch (ParseException | IOException e) {
if (null != transitiveIvyXmlFile) {
log("Exception reading " + transitiveIvyXmlFile.getPath() + ": " + e.toString());
}
success = false;
}
return success;
}
/**
* Recursively finds indirect dependencies that have a version conflict with a direct dependency.
* Returns true if one or more conflicts are found, false otherwise.
*/
private boolean collectConflicts(IvyNodeElement root, IvyNodeElement parent, String moduleName) {
boolean conflicts = false;
for (IvyNodeElement child : parent.getDependencies()) {
String coordinate = "/" + child.getOrganization() + "/" + child.getName();
Dependency dependency = directDependencies.get(coordinate);
if (null != dependency) { // Ignore this indirect dependency if it's not also a direct dependency
String indirectVersion = child.getRevision();
if (isConflict(coordinate, dependency.directVersion, indirectVersion)) {
conflicts = true;
Set<String> moduleNames = dependency.conflictLocations.get(root);
if (null == moduleNames) {
moduleNames = new HashSet<>();
dependency.conflictLocations.put(root, moduleNames);
}
moduleNames.add(moduleName);
}
conflicts |= collectConflicts(root, child, moduleName);
}
}
return conflicts;
}
/**
* Copy-pasted from Ivy's
* org.apache.ivy.plugins.latest.LatestRevisionStrategy
* with minor modifications
*/
private static final Map<String,Integer> SPECIAL_MEANINGS;
static {
SPECIAL_MEANINGS = new HashMap<>();
SPECIAL_MEANINGS.put("dev", -1);
SPECIAL_MEANINGS.put("rc", 1);
SPECIAL_MEANINGS.put("final", 2);
}
/**
* Copy-pasted from Ivy's
* org.apache.ivy.plugins.latest.LatestRevisionStrategy.MridComparator
* with minor modifications
*/
private static class LatestVersionComparator implements Comparator<String> {
@Override
public int compare(String rev1, String rev2) {
rev1 = rev1.replaceAll("([a-zA-Z])(\\d)", "$1.$2");
rev1 = rev1.replaceAll("(\\d)([a-zA-Z])", "$1.$2");
rev2 = rev2.replaceAll("([a-zA-Z])(\\d)", "$1.$2");
rev2 = rev2.replaceAll("(\\d)([a-zA-Z])", "$1.$2");
String[] parts1 = rev1.split("[-._+]");
String[] parts2 = rev2.split("[-._+]");
int i = 0;
for (; i < parts1.length && i < parts2.length; i++) {
if (parts1[i].equals(parts2[i])) {
continue;
}
boolean is1Number = isNumber(parts1[i]);
boolean is2Number = isNumber(parts2[i]);
if (is1Number && !is2Number) {
return 1;
}
if (is2Number && !is1Number) {
return -1;
}
if (is1Number && is2Number) {
return Long.valueOf(parts1[i]).compareTo(Long.valueOf(parts2[i]));
}
// both are strings, we compare them taking into account special meaning
Integer sm1 = SPECIAL_MEANINGS.get(parts1[i].toLowerCase(Locale.ROOT));
Integer sm2 = SPECIAL_MEANINGS.get(parts2[i].toLowerCase(Locale.ROOT));
if (sm1 != null) {
sm2 = sm2 == null ? 0 : sm2;
return sm1.compareTo(sm2);
}
if (sm2 != null) {
return Integer.valueOf(0).compareTo(sm2);
}
return parts1[i].compareTo(parts2[i]);
}
if (i < parts1.length) {
return isNumber(parts1[i]) ? 1 : -1;
}
if (i < parts2.length) {
return isNumber(parts2[i]) ? -1 : 1;
}
return 0;
}
private static final Pattern IS_NUMBER = Pattern.compile("\\d+");
private static boolean isNumber(String str) {
return IS_NUMBER.matcher(str).matches();
}
}
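// A few illustrative orderings under this comparator: "1.9" < "1.10" (numeric parts compare
// numerically), "2.0-rc1" < "2.0-final" ("rc" and "final" carry special meanings), and
// "1.0" < "1.0.1" (a trailing numeric part sorts later).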
private static LatestVersionComparator LATEST_VERSION_COMPARATOR = new LatestVersionComparator();
/**
* Returns true if directVersion is less than indirectVersion, and
* coordinate=indirectVersion is not present in ivy-ignore-conflicts.properties.
*/
private boolean isConflict(String coordinate, String directVersion, String indirectVersion) {
boolean isConflict = LATEST_VERSION_COMPARATOR.compare(directVersion, indirectVersion) < 0;
if (isConflict) {
Set<String> ignoredVersions = ignoreConflictVersions.get(coordinate);
if (null != ignoredVersions && ignoredVersions.contains(indirectVersion)) {
isConflict = false;
}
}
return isConflict;
}
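// For example (hypothetical versions): with a direct version of 1.2 and an indirect version of
// 1.3 for the same /org/name, this reports a conflict unless 1.3 is listed for that coordinate
// in ivy-ignore-conflicts.properties.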
/**
* Returns the number of direct dependencies in conflict with indirect
* dependencies.
*/
private int emitConflicts() {
int conflicts = 0;
StringBuilder builder = new StringBuilder();
for (Map.Entry<String,Dependency> directDependency : directDependencies.entrySet()) {
String coordinate = directDependency.getKey();
Set<Map.Entry<IvyNodeElement,Set<String>>> entrySet
= directDependency.getValue().conflictLocations.entrySet();
if (entrySet.isEmpty()) {
continue;
}
++conflicts;
Map.Entry<IvyNodeElement,Set<String>> first = entrySet.iterator().next();
int notPrinted = entrySet.size() - 1;
builder.append("VERSION CONFLICT: transitive dependency in module(s) ");
boolean isFirst = true;
for (String moduleName : first.getValue()) {
if (isFirst) {
isFirst = false;
} else {
builder.append(", ");
}
builder.append(moduleName);
}
builder.append(":\n");
IvyNodeElement element = first.getKey();
builder.append('/').append(element.getOrganization()).append('/').append(element.getName())
.append('=').append(element.getRevision()).append('\n');
emitConflict(builder, coordinate, first.getKey(), 1);
if (notPrinted > 0) {
builder.append("... and ").append(notPrinted).append(" more\n");
}
builder.append("\n");
}
if (builder.length() > 0) {
log(builder.toString());
}
return conflicts;
}
private boolean emitConflict(StringBuilder builder, String conflictCoordinate, IvyNodeElement parent, int depth) {
for (IvyNodeElement child : parent.getDependencies()) {
String indirectCoordinate = "/" + child.getOrganization() + "/" + child.getName();
if (conflictCoordinate.equals(indirectCoordinate)) {
Dependency dependency = directDependencies.get(conflictCoordinate);
String directVersion = dependency.directVersion;
if (isConflict(conflictCoordinate, directVersion, child.getRevision())) {
for (int i = 0 ; i < depth - 1 ; ++i) {
builder.append(" ");
}
builder.append("+-- ");
builder.append(indirectCoordinate).append("=").append(child.getRevision());
builder.append(" <<< Conflict (direct=").append(directVersion);
builder.append(", latest=").append(dependency.latestVersion).append(")\n");
return true;
}
} else if (hasConflicts(conflictCoordinate, child)) {
for (int i = 0 ; i < depth -1 ; ++i) {
builder.append(" ");
}
builder.append("+-- ");
builder.append(indirectCoordinate).append("=").append(child.getRevision()).append("\n");
if (emitConflict(builder, conflictCoordinate, child, depth + 1)) {
return true;
}
}
}
return false;
}
private boolean hasConflicts(String conflictCoordinate, IvyNodeElement parent) {
// the element itself will never be in conflict, since its coordinate is different
for (IvyNodeElement child : parent.getDependencies()) {
String indirectCoordinate = "/" + child.getOrganization() + "/" + child.getName();
if (conflictCoordinate.equals(indirectCoordinate)) {
Dependency dependency = directDependencies.get(conflictCoordinate);
if (isConflict(conflictCoordinate, dependency.directVersion, child.getRevision())) {
return true;
}
} else if (hasConflicts(conflictCoordinate, child)) {
return true;
}
}
return false;
}
private String xmlToString(File ivyXmlFile) {
StringWriter writer = new StringWriter();
try {
StreamSource inputSource = new StreamSource(new FileInputStream(ivyXmlFile.getPath()));
Transformer serializer = TransformerFactory.newInstance().newTransformer();
serializer.transform(inputSource, new StreamResult(writer));
} catch (TransformerException | IOException e) {
throw new BuildException("Exception reading " + ivyXmlFile.getPath() + ": " + e.toString(), e);
}
return writer.toString();
}
private void setupIvy() {
IvySettings ivySettings = new IvySettings();
try {
ivySettings.setVariable("common.build.dir", commonBuildDir.getAbsolutePath());
ivySettings.setVariable("ivy.exclude.types", "source|javadoc");
ivySettings.setVariable("ivy.resolution-cache.dir", ivyResolutionCacheDir.getAbsolutePath());
ivySettings.setVariable("ivy.lock-strategy", ivyLockStrategy);
ivySettings.setVariable("ivysettings.xml", getProject().getProperty("ivysettings.xml")); // nested settings file
ivySettings.setBaseDir(commonBuildDir);
ivySettings.setDefaultConflictManager(new NoConflictManager());
ivy = Ivy.newInstance(ivySettings);
ivy.configure(topLevelIvySettingsFile);
} catch (Exception e) {
throw new BuildException("Exception reading " + topLevelIvySettingsFile.getPath() + ": " + e.toString(), e);
}
}
/**
* Returns true if the "/org/name" coordinate keys in the given
* properties file are lexically sorted and are not duplicates.
*/
private boolean verifySortedCoordinatesPropertiesFile(File coordinatePropertiesFile) {
log("Checking for lexically sorted non-duplicated '/org/name' keys in: " + coordinatePropertiesFile, verboseLevel);
boolean success = true;
String line = null;
String currentKey = null;
String previousKey = null;
try (InputStream stream = new FileInputStream(coordinatePropertiesFile);
Reader reader = new InputStreamReader(stream, StandardCharsets.ISO_8859_1);
BufferedReader bufferedReader = new BufferedReader(reader)) {
while (null != (line = readLogicalPropertiesLine(bufferedReader))) {
final Matcher keyMatcher = COORDINATE_KEY_PATTERN.matcher(line);
if ( ! keyMatcher.lookingAt()) {
continue; // Ignore keys that don't look like "/org/name"
}
currentKey = keyMatcher.group(1);
if (null != previousKey) {
int comparison = currentKey.compareTo(previousKey);
if (0 == comparison) {
log("DUPLICATE coordinate key '" + currentKey + "' in " + coordinatePropertiesFile.getName(),
Project.MSG_ERR);
success = false;
} else if (comparison < 0) {
log("OUT-OF-ORDER coordinate key '" + currentKey + "' in " + coordinatePropertiesFile.getName(),
Project.MSG_ERR);
success = false;
}
}
previousKey = currentKey;
}
} catch (IOException e) {
throw new BuildException("Exception reading " + coordinatePropertiesFile.getPath() + ": " + e.toString(), e);
}
return success;
}
/**
* Builds up logical {@link java.util.Properties} lines, composed of one non-blank,
* non-comment initial line, either:
*
* 1. without a non-escaped trailing backslash; or
* 2. with a non-escaped trailing backslash, followed by
* zero or more lines with a non-escaped trailing backslash, followed by
* one or more lines without a non-escaped trailing backslash
*
* All leading non-escaped whitespace and trailing non-escaped whitespace +
* non-escaped backslash are trimmed from each line before concatenating.
*
* After composing the logical line, escaped characters are un-escaped.
*
* null is returned if there are no lines left to read.
*/
private String readLogicalPropertiesLine(BufferedReader reader) throws IOException {
final StringBuilder logicalLine = new StringBuilder();
String line;
do {
line = reader.readLine();
if (null == line) {
return null;
}
} while (BLANK_OR_COMMENT_LINE_PATTERN.matcher(line).matches());
Matcher backslashMatcher = TRAILING_BACKSLASH_PATTERN.matcher(line);
// Check for a non-escaped backslash
if (backslashMatcher.find() && 1 == (backslashMatcher.group(1).length() % 2)) {
final Matcher firstLineMatcher = TRAILING_WHITESPACE_BACKSLASH_PATTERN.matcher(line);
if (firstLineMatcher.matches()) {
logicalLine.append(firstLineMatcher.group(1)); // trim trailing backslash and any preceding whitespace
}
line = reader.readLine();
while (null != line
&& (backslashMatcher = TRAILING_BACKSLASH_PATTERN.matcher(line)).find()
&& 1 == (backslashMatcher.group(1).length() % 2)) {
// Trim leading whitespace, the trailing backslash and any preceding whitespace
final Matcher goodStuffMatcher = WHITESPACE_GOODSTUFF_WHITESPACE_BACKSLASH_PATTERN.matcher(line);
if (goodStuffMatcher.matches()) {
logicalLine.append(goodStuffMatcher.group(1));
}
line = reader.readLine();
}
if (null != line) {
// line can't have a non-escaped trailing backslash
final Matcher leadingWhitespaceMatcher = LEADING_WHITESPACE_PATTERN.matcher(line);
if (leadingWhitespaceMatcher.matches()) {
line = leadingWhitespaceMatcher.group(1); // trim leading whitespace
}
logicalLine.append(line);
}
} else {
logicalLine.append(line);
}
// trim non-escaped leading whitespace
final Matcher leadingWhitespaceMatcher = LEADING_WHITESPACE_PATTERN.matcher(logicalLine);
final CharSequence leadingWhitespaceStripped = leadingWhitespaceMatcher.matches()
? leadingWhitespaceMatcher.group(1)
: logicalLine;
// unescape all chars in the logical line
StringBuilder output = new StringBuilder();
final int numChars = leadingWhitespaceStripped.length();
for (int pos = 0 ; pos < numChars - 1 ; ++pos) {
char ch = leadingWhitespaceStripped.charAt(pos);
if (ch == '\\') {
ch = leadingWhitespaceStripped.charAt(++pos);
}
output.append(ch);
}
if (numChars > 0) {
output.append(leadingWhitespaceStripped.charAt(numChars - 1));
}
return output.toString();
}
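// Example (hypothetical content): the physical lines
//   /org.example/foo = 1.2.3, \
//       1.2.4
// are returned as the single logical line "/org.example/foo = 1.2.3,1.2.4" (the whitespace
// around the continuation backslash is trimmed before concatenation).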
/**
* Check a single ivy.xml file for dependencies' versions in rev="${/org/name}"
* format. Returns false if problems are found, true otherwise.
*/
private boolean checkIvyXmlFile(File ivyXmlFile)
throws ParserConfigurationException, SAXException, IOException {
log("Scanning: " + ivyXmlFile.getPath(), verboseLevel);
SAXParser xmlReader = SAX_PARSER_FACTORY.newSAXParser();
DependencyRevChecker revChecker = new DependencyRevChecker(ivyXmlFile);
xmlReader.parse(new InputSource(ivyXmlFile.getAbsolutePath()), revChecker);
return ! revChecker.fail;
}
private class DependencyRevChecker extends DefaultHandler {
private final File ivyXmlFile;
private final Stack<String> tags = new Stack<>();
public boolean fail = false;
public DependencyRevChecker(File ivyXmlFile) {
this.ivyXmlFile = ivyXmlFile;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
if (localName.equals("dependency") && insideDependenciesTag()) {
String org = attributes.getValue("org");
boolean foundAllAttributes = true;
if (null == org) {
log("MISSING 'org' attribute on <dependency> in " + ivyXmlFile.getPath(), Project.MSG_ERR);
fail = true;
foundAllAttributes = false;
}
String name = attributes.getValue("name");
if (null == name) {
log("MISSING 'name' attribute on <dependency> in " + ivyXmlFile.getPath(), Project.MSG_ERR);
fail = true;
foundAllAttributes = false;
}
String rev = attributes.getValue("rev");
if (null == rev) {
log("MISSING 'rev' attribute on <dependency> in " + ivyXmlFile.getPath(), Project.MSG_ERR);
fail = true;
foundAllAttributes = false;
}
if (foundAllAttributes) {
String coordinateKey = "/" + org + '/' + name;
String expectedRev = "${" + coordinateKey + '}';
if ( ! rev.equals(expectedRev)) {
log("BAD <dependency> 'rev' attribute value '" + rev + "' - expected '" + expectedRev + "'"
+ " in " + ivyXmlFile.getPath(), Project.MSG_ERR);
fail = true;
}
if ( ! directDependencies.containsKey(coordinateKey)) {
log("MISSING key '" + coordinateKey + "' in " + centralizedVersionsFile.getPath(), Project.MSG_ERR);
fail = true;
}
}
}
tags.push(localName);
}
@Override
public void endElement (String uri, String localName, String qName) throws SAXException {
tags.pop();
}
private boolean insideDependenciesTag() {
return tags.size() == 2 && tags.get(0).equals("ivy-module") && tags.get(1).equals("dependencies");
}
}
}

View File

@ -1,352 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.validation;
import java.io.File;
import java.io.FileInputStream;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Mapper;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.types.resources.Resources;
import org.apache.tools.ant.util.FileNameMapper;
/**
* An ANT task that verifies that JAR files have associated <code>LICENSE</code>,
* <code>NOTICE</code>, and <code>sha1</code> files.
*/
public class LicenseCheckTask extends Task {
public final static String CHECKSUM_TYPE = "sha1";
private static final int CHECKSUM_BUFFER_SIZE = 8 * 1024;
private static final int CHECKSUM_BYTE_MASK = 0xFF;
private static final String FAILURE_MESSAGE = "License check failed. Check the logs.\n"
+ "If you recently modified ivy-versions.properties or any module's ivy.xml,\n"
+ "make sure you run \"ant clean-jars jar-checksums\" before running precommit.";
private Pattern skipRegexChecksum;
private boolean skipSnapshotsChecksum;
private boolean skipChecksum;
/**
* All JAR files to check.
*/
private Resources jarResources = new Resources();
/**
* Directory containing licenses
*/
private File licenseDirectory;
/**
* License file mapper.
*/
private FileNameMapper licenseMapper;
/**
* A logging level associated with verbose logging.
*/
private int verboseLevel = Project.MSG_VERBOSE;
/**
* Failure flag.
*/
private boolean failures;
/**
* Adds a set of JAR resources to check.
*/
public void add(ResourceCollection rc) {
jarResources.add(rc);
}
/**
* Adds a license mapper.
*/
public void addConfiguredLicenseMapper(Mapper mapper) {
if (licenseMapper != null) {
throw new BuildException("Only one license mapper is allowed.");
}
this.licenseMapper = mapper.getImplementation();
}
public void setVerbose(boolean verbose) {
verboseLevel = (verbose ? Project.MSG_INFO : Project.MSG_VERBOSE);
}
public void setLicenseDirectory(File file) {
licenseDirectory = file;
}
public void setSkipSnapshotsChecksum(boolean skipSnapshotsChecksum) {
this.skipSnapshotsChecksum = skipSnapshotsChecksum;
}
public void setSkipChecksum(boolean skipChecksum) {
this.skipChecksum = skipChecksum;
}
public void setSkipRegexChecksum(String skipRegexChecksum) {
try {
if (skipRegexChecksum != null && skipRegexChecksum.length() > 0) {
this.skipRegexChecksum = Pattern.compile(skipRegexChecksum);
}
} catch (PatternSyntaxException e) {
throw new BuildException("Unable to compile skipRegexChecksum pattern. Reason: "
+ e.getMessage() + " " + skipRegexChecksum, e);
}
}
/**
* Execute the task.
*/
@Override
public void execute() throws BuildException {
if (licenseMapper == null) {
throw new BuildException("Expected an embedded <licenseMapper>.");
}
if (skipChecksum) {
log("Skipping checksum verification for dependencies", Project.MSG_INFO);
} else {
if (skipSnapshotsChecksum) {
log("Skipping checksum for SNAPSHOT dependencies", Project.MSG_INFO);
}
if (skipRegexChecksum != null) {
log("Skipping checksum for dependencies matching regex: " + skipRegexChecksum.pattern(),
Project.MSG_INFO);
}
}
jarResources.setProject(getProject());
processJars();
if (failures) {
throw new BuildException(FAILURE_MESSAGE);
}
}
/**
* Process all JARs.
*/
private void processJars() {
log("Starting scan.", verboseLevel);
long start = System.currentTimeMillis();
@SuppressWarnings("unchecked")
Iterator<Resource> iter = (Iterator<Resource>) jarResources.iterator();
int checked = 0;
int errors = 0;
while (iter.hasNext()) {
final Resource r = iter.next();
if (!r.isExists()) {
throw new BuildException("JAR resource does not exist: " + r.getName());
}
if (!(r instanceof FileResource)) {
throw new BuildException("Only filesystem resource are supported: " + r.getName()
+ ", was: " + r.getClass().getName());
}
File jarFile = ((FileResource) r).getFile();
if (! checkJarFile(jarFile) ) {
errors++;
}
checked++;
}
log(String.format(Locale.ROOT,
"Scanned %d JAR file(s) for licenses (in %.2fs.), %d error(s).",
checked, (System.currentTimeMillis() - start) / 1000.0, errors),
errors > 0 ? Project.MSG_ERR : Project.MSG_INFO);
}
/**
* Check a single JAR file.
*/
private boolean checkJarFile(File jarFile) {
log("Scanning: " + jarFile.getPath(), verboseLevel);
if (!skipChecksum) {
boolean skipDueToSnapshot = skipSnapshotsChecksum && jarFile.getName().contains("-SNAPSHOT");
if (!skipDueToSnapshot && !matchesRegexChecksum(jarFile, skipRegexChecksum)) {
// validate the jar matches against our expected hash
final File checksumFile = new File(licenseDirectory, jarFile.getName()
+ "." + CHECKSUM_TYPE);
if (!(checksumFile.exists() && checksumFile.canRead())) {
log("MISSING " + CHECKSUM_TYPE + " checksum file for: "
+ jarFile.getPath(), Project.MSG_ERR);
log("EXPECTED " + CHECKSUM_TYPE + " checksum file : "
+ checksumFile.getPath(), Project.MSG_ERR);
this.failures = true;
return false;
} else {
final String expectedChecksum = readChecksumFile(checksumFile);
try {
final MessageDigest md = MessageDigest.getInstance(CHECKSUM_TYPE);
byte[] buf = new byte[CHECKSUM_BUFFER_SIZE];
try (FileInputStream fis = new FileInputStream(jarFile);
DigestInputStream dis = new DigestInputStream(fis, md)) {
while (dis.read(buf, 0, CHECKSUM_BUFFER_SIZE) != -1) {
// NOOP
}
} catch (IOException ioe) {
throw new BuildException("IO error computing checksum of file: "
+ jarFile, ioe);
}
final byte[] checksumBytes = md.digest();
final String checksum = createChecksumString(checksumBytes);
if (!checksum.equals(expectedChecksum)) {
log("CHECKSUM FAILED for " + jarFile.getPath() + " (expected: \""
+ expectedChecksum + "\" was: \"" + checksum + "\")",
Project.MSG_ERR);
this.failures = true;
return false;
}
} catch (NoSuchAlgorithmException ae) {
throw new BuildException("Digest type " + CHECKSUM_TYPE
+ " not supported by your JVM", ae);
}
}
} else if (skipDueToSnapshot) {
log("Skipping jar because it is a SNAPSHOT : "
+ jarFile.getAbsolutePath(), Project.MSG_INFO);
} else {
log("Skipping jar because it matches regex pattern: "
+ jarFile.getAbsolutePath() + " pattern: " + skipRegexChecksum.pattern(), Project.MSG_INFO);
}
}
// Get the expected license path base from the mapper and search for license files.
Map<File, LicenseType> foundLicenses = new LinkedHashMap<>();
List<File> expectedLocations = new ArrayList<>();
outer:
for (String mappedPath : licenseMapper.mapFileName(jarFile.getName())) {
for (LicenseType licenseType : LicenseType.values()) {
File licensePath = new File(licenseDirectory, mappedPath + licenseType.licenseFileSuffix());
if (licensePath.exists()) {
foundLicenses.put(licensePath, licenseType);
log(" FOUND " + licenseType.name() + " license at " + licensePath.getPath(),
verboseLevel);
// We could continue scanning here to detect duplicate associations?
break outer;
} else {
expectedLocations.add(licensePath);
}
}
}
// Check for NOTICE files.
for (Map.Entry<File, LicenseType> e : foundLicenses.entrySet()) {
LicenseType license = e.getValue();
String licensePath = e.getKey().getName();
String baseName = licensePath.substring(
0, licensePath.length() - license.licenseFileSuffix().length());
File noticeFile = new File(licenseDirectory, baseName + license.noticeFileSuffix());
if (noticeFile.exists()) {
log(" FOUND NOTICE file at " + noticeFile.getAbsolutePath(), verboseLevel);
} else {
if (license.isNoticeRequired()) {
this.failures = true;
log("MISSING NOTICE for the license file:\n "
+ licensePath + "\n Expected location below:\n "
+ noticeFile.getAbsolutePath(), Project.MSG_ERR);
}
}
}
// In case there is something missing, complain.
if (foundLicenses.isEmpty()) {
this.failures = true;
StringBuilder message = new StringBuilder();
message.append("MISSING LICENSE for the following file:\n ").append(jarFile.getAbsolutePath()).append("\n Expected locations below:\n");
for (File location : expectedLocations) {
message.append(" => ").append(location.getAbsolutePath()).append("\n");
}
log(message.toString(), Project.MSG_ERR);
return false;
}
return true;
}
private static final String createChecksumString(byte[] digest) {
StringBuilder checksum = new StringBuilder();
for (int i = 0; i < digest.length; i++) {
checksum.append(String.format(Locale.ROOT, "%02x",
CHECKSUM_BYTE_MASK & digest[i]));
}
return checksum.toString();
}
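// The checksum file lives in the license directory and is named "<jar file name>.sha1"
// (see checkJarFile above); it must contain just the lowercase hex digest on its first
// line, since it is compared with String.equals() against the digest computed there.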
private static final String readChecksumFile(File f) {
try (BufferedReader reader = new BufferedReader(new InputStreamReader
(new FileInputStream(f), StandardCharsets.UTF_8))) {
String checksum = reader.readLine();
if (null == checksum || 0 == checksum.length()) {
throw new BuildException("Failed to find checksum in file: " + f);
}
return checksum;
} catch (IOException e) {
throw new BuildException("IO error reading checksum file: " + f, e);
}
}
private static final boolean matchesRegexChecksum(File jarFile, Pattern skipRegexChecksum) {
if (skipRegexChecksum == null) {
return false;
}
Matcher m = skipRegexChecksum.matcher(jarFile.getName());
return m.matches();
}
}

View File

@ -1,75 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.validation;
/**
* A list of accepted licenses. See also http://www.apache.org/legal/3party.html
*
*/
public enum LicenseType {
ASL("Apache Software License 2.0", true),
BSD("Berkeley Software Distribution", true),
BSD_LIKE("BSD like license", true), // "BSD like" means the BSD license with someone's own name and copyright substituted in, or a very similar license.
CDDL("Common Development and Distribution License", false),
CPL("Common Public License", true),
EPL("Eclipse Public License Version 1.0", false),
MIT("Massachusetts Institute of Tech. License", false),
MPL("Mozilla Public License", false), // not sure whether a NOTICE is required
PD("Public Domain", false),
//SUNBCLA("Sun Binary Code License Agreement"),
SUN("Sun Open Source License", false),
COMPOUND("Compound license (see NOTICE).", true),
FAKE("FAKE license - not needed", false);
private String display;
private boolean noticeRequired;
LicenseType(String display, boolean noticeRequired) {
this.display = display;
this.noticeRequired = noticeRequired;
}
public boolean isNoticeRequired() {
return noticeRequired;
}
public String getDisplay() {
return display;
}
@Override
public String toString() {
return "LicenseType{" +
"display='" + display + '\'' +
'}';
}
/**
* Expected license file suffix for a given license type.
*/
public String licenseFileSuffix() {
return "-LICENSE-" + this.name() + ".txt";
}
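// For example, a jar that the license mapper maps to the base name "foo" (a hypothetical
// name) is expected to have "foo-LICENSE-ASL.txt" if its license type is ASL, and
// "foo-NOTICE.txt" as the corresponding notice file (see noticeFileSuffix() below).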
/**
* Expected notice file suffix for a given license type.
*/
public String noticeFileSuffix() {
return "-NOTICE.txt";
}
}

View File

@ -1,547 +0,0 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="common-solr" default="default" xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
<description>
This file is designed to be imported into a main build file and is not intended
for standalone use.
</description>
<dirname file="${ant.file.common-solr}" property="common-solr.dir"/>
<property name="Name" value="Solr" />
<!-- solr uses Java 11 -->
<property name="javac.release" value="11"/>
<property name="javac.args" value="-Xlint:-deprecation"/>
<property name="javac.profile.args" value=""/>
<property name="dest" location="${common-solr.dir}/build" />
<property name="build.dir" location="${dest}/${ant.project.name}"/>
<property name="jacoco.report.dir" location="${dest}/jacoco"/>
<property name="dist" location="${common-solr.dir}/dist"/>
<property name="package.dir" location="${common-solr.dir}/package"/>
<property name="maven.dist.dir" location="${package.dir}/maven"/>
<property name="lucene-libs" location="${dest}/lucene-libs" />
<property name="tests.userdir" location="src/test-files"/>
<property name="tests.policy" location="${common-solr.dir}/server/etc/security.policy"/>
<property name="server.dir" location="${common-solr.dir}/server" />
<property name="example" location="${common-solr.dir}/example" />
<property name="javadoc.dir" location="${dest}/docs"/>
<property name="javadoc-online.dir" location="${dest}/docs-online"/>
<property name="tests.cleanthreads.sysprop" value="perClass"/>
<property name="changes.target.dir" location="${dest}/docs/changes"/>
<property name="license.dir" location="${common-solr.dir}/licenses"/>
<property name="solr.tgz.unpack.dir" location="${common-solr.dir}/build/solr.tgz.unpacked"/>
<property name="dist.jar.dir.prefix" value="${solr.tgz.unpack.dir}/solr"/>
<property name="dist.jar.dir.suffix" value="dist"/>
<import file="${common-solr.dir}/../lucene/module-build.xml"/>
<property name="solr.tgz.file" location="${common-solr.dir}/package/solr-${version}.tgz"/>
<available file="${solr.tgz.file}" property="solr.tgz.exists"/>
<available type="dir" file="${solr.tgz.unpack.dir}" property="solr.tgz.unpack.dir.exists"/>
<target name="-ensure-solr-tgz-exists" unless="solr.tgz.exists">
<ant dir="${common-solr.dir}" target="create-package" inheritall="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<target name="-unpack-solr-tgz" unless="${solr.tgz.unpack.dir.exists}">
<antcall target="-ensure-solr-tgz-exists">
<propertyset refid="uptodate.and.compiled.properties"/>
</antcall>
<mkdir dir="${solr.tgz.unpack.dir}"/>
<untar compression="gzip" src="${solr.tgz.file}" dest="${solr.tgz.unpack.dir}">
<patternset refid="patternset.lucene.solr.jars"/>
</untar>
</target>
<!-- backwards compatibility with existing targets/tasks; TODO: remove this! -->
<property name="fullnamever" value="${final.name}"/>
<path id="additional.dependencies">
<fileset dir="${common-solr.dir}/core/lib" excludes="${common.classpath.excludes}"/>
<fileset dir="${common-solr.dir}/solrj/lib" excludes="${common.classpath.excludes}"/>
<fileset dir="${common-solr.dir}/server/lib" excludes="${common.classpath.excludes}"/>
<fileset dir="lib" excludes="${common.classpath.excludes}" erroronmissingdir="false"/>
</path>
<path id="solr.lucene.libs">
<!-- List of jars that will be used as the foundation for both
the base classpath, as well as copied into the lucene-libs dir
in the release.
-->
<!-- NOTE: lucene-core is explicitly not included because of the
base.classpath (compilation & tests are done directly against
the class files w/o needing to build the jar)
-->
<pathelement location="${analyzers-common.jar}"/>
<pathelement location="${analyzers-kuromoji.jar}"/>
<pathelement location="${analyzers-nori.jar}"/>
<pathelement location="${analyzers-phonetic.jar}"/>
<pathelement location="${codecs.jar}"/>
<pathelement location="${backward-codecs.jar}"/>
<pathelement location="${highlighter.jar}"/>
<pathelement location="${memory.jar}"/>
<pathelement location="${misc.jar}"/>
<pathelement location="${spatial-extras.jar}"/>
<pathelement location="${spatial3d.jar}"/>
<pathelement location="${expressions.jar}"/>
<pathelement location="${suggest.jar}"/>
<pathelement location="${grouping.jar}"/>
<pathelement location="${queries.jar}"/>
<pathelement location="${queryparser.jar}"/>
<pathelement location="${join.jar}"/>
<pathelement location="${sandbox.jar}"/>
<pathelement location="${classification.jar}"/>
</path>
<path id="solr.base.classpath">
<pathelement location="${common-solr.dir}/build/solr-solrj/classes/java"/>
<pathelement location="${common-solr.dir}/build/solr-core/classes/java"/>
<path refid="solr.lucene.libs" />
<path refid="additional.dependencies"/>
<path refid="base.classpath"/>
</path>
<path id="classpath" refid="solr.base.classpath"/>
<path id="solr.test.base.classpath">
<pathelement path="${common-solr.dir}/build/solr-test-framework/classes/java"/>
<fileset dir="${common-solr.dir}/test-framework/lib">
<include name="*.jar"/>
<exclude name="junit-*.jar" />
<exclude name="randomizedtesting-runner-*.jar" />
<exclude name="ant*.jar" />
</fileset>
<pathelement path="src/test-files"/>
<path refid="test.base.classpath"/>
</path>
<path id="test.classpath" refid="solr.test.base.classpath"/>
<macrodef name="solr-contrib-uptodate">
<attribute name="name"/>
<attribute name="property" default="@{name}.uptodate"/>
<attribute name="classpath.property" default="@{name}.jar"/>
<!-- set jarfile only if the target jar file has no generic name -->
<attribute name="jarfile" default="${common-solr.dir}/build/contrib/solr-@{name}/solr-@{name}-${version}.jar"/>
<sequential>
<!--<echo message="Checking '@{jarfile}' against source folder '${common.dir}/contrib/@{name}/src/java'"/>-->
<property name="@{classpath.property}" location="@{jarfile}"/>
<uptodate property="@{property}" targetfile="@{jarfile}">
<srcfiles dir="${common-solr.dir}/contrib/@{name}/src/java" includes="**/*.java"/>
</uptodate>
</sequential>
</macrodef>
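<!-- Illustrative use of the macro above (not invoked from this file):
     <solr-contrib-uptodate name="analysis-extras"/>
     defines "analysis-extras.jar" pointing at the contrib jar under
     build/contrib/solr-analysis-extras and sets "analysis-extras.uptodate"
     when that jar is newer than the contrib's Java sources. -->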
<target name="validate" depends="compile-tools">
</target>
<target name="init-dist" depends="resolve-groovy">
<mkdir dir="${build.dir}"/>
<mkdir dir="${package.dir}"/>
<mkdir dir="${dist}"/>
<mkdir dir="${maven.dist.dir}"/>
</target>
<target name="prep-lucene-jars"
depends="resolve-groovy,
jar-lucene-core, jar-backward-codecs, jar-analyzers-phonetic, jar-analyzers-kuromoji, jar-analyzers-nori, jar-codecs,jar-expressions, jar-suggest, jar-highlighter, jar-memory,
jar-misc, jar-spatial-extras, jar-spatial3d, jar-grouping, jar-queries, jar-queryparser, jar-join, jar-sandbox, jar-classification">
<property name="solr.deps.compiled" value="true"/>
</target>
<target name="lucene-jars-to-solr"
depends="-lucene-jars-to-solr-not-for-package,-lucene-jars-to-solr-package"/>
<target name="-lucene-jars-to-solr-not-for-package" unless="called.from.create-package">
<sequential>
<antcall target="prep-lucene-jars" inheritall="true"/>
<property name="solr.deps.compiled" value="true"/>
<copy todir="${lucene-libs}" preservelastmodified="true" flatten="true" failonerror="true" overwrite="true">
<path refid="solr.lucene.libs" />
<!-- NOTE: lucene-core is not included in "solr.lucene.libs" because it is referenced directly on the classpaths; its jar is therefore copied explicitly here. -->
<fileset file="${lucene-core.jar}" />
</copy>
</sequential>
</target>
<target name="-lucene-jars-to-solr-package" if="called.from.create-package">
<sequential>
<antcall target="-unpack-lucene-tgz" inheritall="true"/>
<pathconvert property="relative.solr.lucene.libs" pathsep=",">
<path refid="solr.lucene.libs"/>
<fileset file="${lucene-core.jar}"/>
<globmapper from="${common.build.dir}/*" to="*" handledirsep="true"/>
</pathconvert>
<mkdir dir="${lucene-libs}"/>
<copy todir="${lucene-libs}" preservelastmodified="true" flatten="true" failonerror="true" overwrite="true">
<fileset dir="${lucene.tgz.unpack.dir}/lucene-${version}" includes="${relative.solr.lucene.libs}"/>
</copy>
</sequential>
</target>
<!-- Shared core/solrj/test-framework/contrib targets -->
<macrodef name="solr-jarify" description="Builds a Solr JAR file">
<attribute name="basedir" default="${build.dir}/classes/java"/>
<attribute name="destfile" default="${build.dir}/${final.name}.jar"/>
<attribute name="title" default="Apache Solr Search Server: ${ant.project.name}"/>
<attribute name="excludes" default="**/pom.xml,**/*.iml"/>
<attribute name="metainf.source.dir" default="${common-solr.dir}"/>
<attribute name="implementation.title" default="org.apache.solr"/>
<attribute name="manifest.file" default="${manifest.file}"/>
<element name="solr-jarify-filesets" optional="true"/>
<element name="solr-jarify-additional-manifest-attributes" optional="true"/>
<sequential>
<jarify basedir="@{basedir}" destfile="@{destfile}"
title="@{title}" excludes="@{excludes}"
metainf.source.dir="@{metainf.source.dir}"
implementation.title="@{implementation.title}"
manifest.file="@{manifest.file}">
<filesets>
<solr-jarify-filesets />
</filesets>
<jarify-additional-manifest-attributes>
<solr-jarify-additional-manifest-attributes />
</jarify-additional-manifest-attributes>
</jarify>
</sequential>
</macrodef>
<target name="jar-core" depends="compile-core">
<solr-jarify/>
</target>
<target name="compile-core" depends="prep-lucene-jars,resolve-example,resolve-server,common.compile-core"/>
<target name="compile-test" depends="compile-solr-test-framework,common.compile-test"/>
<target name="dist" depends="jar-core">
<copy file="${build.dir}/${fullnamever}.jar" todir="${dist}"/>
</target>
<property name="lucenedocs" location="${common.dir}/build/docs"/>
<!-- dependency to ensure all lucene javadocs are present -->
<target name="lucene-javadocs" depends="javadocs-lucene-core,javadocs-analyzers-common,javadocs-analyzers-icu,javadocs-analyzers-kuromoji,javadocs-analyzers-nori,javadocs-analyzers-phonetic,javadocs-analyzers-smartcn,javadocs-analyzers-morfologik,javadocs-analyzers-stempel,javadocs-backward-codecs,javadocs-codecs,javadocs-expressions,javadocs-suggest,javadocs-grouping,javadocs-queries,javadocs-queryparser,javadocs-highlighter,javadocs-memory,javadocs-misc,javadocs-spatial-extras,javadocs-join,javadocs-test-framework"/>
<!-- create javadocs for the current module -->
<target name="javadocs" depends="compile-core,define-lucene-javadoc-url,lucene-javadocs,javadocs-solr-core,check-javadocs-uptodate" unless="javadocs-uptodate-${name}">
<sequential>
<mkdir dir="${javadoc.dir}/${name}"/>
<solr-invoke-javadoc>
<solrsources>
<packageset dir="${src.dir}"/>
</solrsources>
<links>
<link href="../solr-solrj"/>
<link href="../solr-core"/>
</links>
</solr-invoke-javadoc>
<solr-jarify basedir="${javadoc.dir}/${name}" destfile="${build.dir}/${final.name}-javadoc.jar"/>
</sequential>
</target>
<target name="check-solr-core-javadocs-uptodate" unless="solr-core-javadocs.uptodate">
<uptodate property="solr-core-javadocs.uptodate" targetfile="${build.dir}/solr-core/solr-core-${version}-javadoc.jar">
<srcfiles dir="${common-solr.dir}/core/src/java" includes="**/*.java"/>
</uptodate>
</target>
<target name="check-solrj-javadocs-uptodate" unless="solrj-javadocs.uptodate">
<uptodate property="solrj-javadocs.uptodate" targetfile="${build.dir}/solr-solrj/solr-solrj-${version}-javadoc.jar">
<srcfiles dir="${common-solr.dir}/solrj/src/java" includes="**/*.java"/>
</uptodate>
</target>
<target name="javadocs-solr-core" depends="check-solr-core-javadocs-uptodate" unless="solr-core-javadocs.uptodate">
<ant dir="${common-solr.dir}/core" target="javadocs" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solr-core-javadocs.uptodate" value="true"/>
</target>
<target name="javadocs-solrj" depends="check-solrj-javadocs-uptodate" unless="solrj-javadocs.uptodate">
<ant dir="${common-solr.dir}/solrj" target="javadocs" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solrj-javadocs.uptodate" value="true"/>
</target>
<!-- Macro to create Solr javadocs with links to Lucene. Make sure the calling task depends on lucene-javadocs. -->
<macrodef name="solr-invoke-javadoc">
<element name="solrsources" optional="yes"/>
<element name="links" optional="yes"/>
<attribute name="destdir" default="${javadoc.dir}/${name}"/>
<attribute name="title" default="${Name} ${version} ${name} API"/>
<attribute name="overview" default="${src.dir}/overview.html"/>
<sequential>
<mkdir dir="@{destdir}"/>
<invoke-javadoc destdir="@{destdir}" title="@{title}" overview="@{overview}">
<sources>
<solrsources/>
<link offline="true" href="${lucene.javadoc.url}core" packagelistloc="${lucenedocs}/core"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-common" packagelistloc="${lucenedocs}/analyzers-common"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-icu" packagelistloc="${lucenedocs}/analyzers-icu"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-kuromoji" packagelistloc="${lucenedocs}/analyzers-kuromoji"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-nori" packagelistloc="${lucenedocs}/analyzers-nori"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-morfologik" packagelistloc="${lucenedocs}/analyzers-morfologik"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-phonetic" packagelistloc="${lucenedocs}/analyzers-phonetic"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-smartcn" packagelistloc="${lucenedocs}/analyzers-smartcn"/>
<link offline="true" href="${lucene.javadoc.url}analyzers-stempel" packagelistloc="${lucenedocs}/analyzers-stempel"/>
<link offline="true" href="${lucene.javadoc.url}backward-codecs" packagelistloc="${lucenedocs}/backward-codecs"/>
<link offline="true" href="${lucene.javadoc.url}codecs" packagelistloc="${lucenedocs}/codecs"/>
<link offline="true" href="${lucene.javadoc.url}expressions" packagelistloc="${lucenedocs}/expressions"/>
<link offline="true" href="${lucene.javadoc.url}suggest" packagelistloc="${lucenedocs}/suggest"/>
<link offline="true" href="${lucene.javadoc.url}grouping" packagelistloc="${lucenedocs}/grouping"/>
<link offline="true" href="${lucene.javadoc.url}join" packagelistloc="${lucenedocs}/join"/>
<link offline="true" href="${lucene.javadoc.url}queries" packagelistloc="${lucenedocs}/queries"/>
<link offline="true" href="${lucene.javadoc.url}queryparser" packagelistloc="${lucenedocs}/queryparser"/>
<link offline="true" href="${lucene.javadoc.url}highlighter" packagelistloc="${lucenedocs}/highlighter"/>
<link offline="true" href="${lucene.javadoc.url}memory" packagelistloc="${lucenedocs}/memory"/>
<link offline="true" href="${lucene.javadoc.url}misc" packagelistloc="${lucenedocs}/misc"/>
<link offline="true" href="${lucene.javadoc.url}classification" packagelistloc="${lucenedocs}/classification"/>
<link offline="true" href="${lucene.javadoc.url}spatial-extras" packagelistloc="${lucenedocs}/spatial-extras"/>
<links/>
<link href=""/>
</sources>
</invoke-javadoc>
</sequential>
</macrodef>
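<!-- For a custom or SNAPSHOT build (version != version.base), or when useLocalJavadocUrl
     is true, point lucene.javadoc.url at the local build/docs directory; otherwise link
     to the released javadocs at https://lucene.apache.org/core/. -->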
<target name="define-lucene-javadoc-url" depends="resolve-groovy" unless="lucene.javadoc.url">
<property name="useLocalJavadocUrl" value=""/>
<groovy><![CDATA[
String url, version = properties['version'];
String useLocalJavadocUrl = properties['useLocalJavadocUrl'];
if (version != properties['version.base'] || Boolean.parseBoolean(useLocalJavadocUrl)) {
url = new File(properties['common.dir'], 'build' + File.separator + 'docs').toURI().toASCIIString();
if (!(url =~ /\/$/)) url += '/';
} else {
version = version.replace('.', '_');
url = 'https://lucene.apache.org/core/' + version + '/';
}
task.log('Using the following URL to refer to Lucene Javadocs: ' + url);
properties['lucene.javadoc.url'] = url;
]]></groovy>
</target>
<target name="define-solr-javadoc-url" depends="resolve-groovy" unless="solr.javadoc.url">
<groovy><![CDATA[
String url, version = properties['version'];
if (version != properties['version.base']) {
url = '';
task.log('Disabled Solr Javadocs online URL for packaging (custom build / SNAPSHOT version).');
} else {
version = version.replace('.', '_');
url = 'https://lucene.apache.org/solr/' + version + '/';
task.log('Using the following URL to refer to Solr Javadocs: ' + url);
}
properties['solr.javadoc.url'] = url;
]]></groovy>
</target>
<target name="jar-src">
<sequential>
<mkdir dir="${build.dir}"/>
<solr-jarify basedir="${src.dir}" destfile="${build.dir}/${final.name}-src.jar">
<solr-jarify-filesets>
<fileset dir="${resources.dir}" erroronmissingdir="no"/>
</solr-jarify-filesets>
</solr-jarify>
</sequential>
</target>
<target name="-validate-maven-dependencies" depends="-validate-maven-dependencies.init">
<m2-validate-dependencies pom.xml="${maven.pom.xml}" licenseDirectory="${license.dir}">
<additional-filters>
<replaceregex pattern="jetty([^/]+)$" replace="jetty" flags="gi" />
<replaceregex pattern="slf4j-([^/]+)$" replace="slf4j" flags="gi" />
<replaceregex pattern="(bcmail|bcprov)-([^/]+)$" replace="\1" flags="gi" />
</additional-filters>
<excludes>
<rsel:or>
<rsel:name name="**/lucene-*-${maven.version.glob}.jar" handledirsep="true"/>
<rsel:name name="**/solr-*-${maven.version.glob}.jar" handledirsep="true"/>
<!-- TODO: figure out what is going on here with servlet-apis -->
<rsel:name name="**/*servlet*.jar" handledirsep="true"/>
</rsel:or>
</excludes>
</m2-validate-dependencies>
</target>
<!-- Solr core targets -->
<target name="compile-solr-core" description="Compile Solr core." unless="solr.core.compiled">
<ant dir="${common-solr.dir}/core" target="compile-core" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solr.core.compiled" value="true"/>
</target>
<target name="compile-test-solr-core" description="Compile solr core tests">
<ant dir="${common-solr.dir}/core" target="compile-test" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solr.core.compiled" value="true"/>
</target>
<target name="dist-core" depends="init-dist"
description="Creates the Solr JAR Distribution file.">
<ant dir="${common-solr.dir}/core" target="dist" inheritall="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<!-- Solrj targets -->
<target name="compile-solrj" description="Compile the java client." unless="solrj.compiled">
<ant dir="${common-solr.dir}/solrj" target="compile-core" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solrj.compiled" value="true"/>
</target>
<target name="compile-test-solrj" description="Compile java client tests">
<ant dir="${common-solr.dir}/solrj" target="compile-test" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solrj.compiled" value="true"/>
</target>
<target name="dist-solrj" depends="init-dist"
description="Creates the Solr-J JAR Distribution file.">
<ant dir="${common-solr.dir}/solrj" target="dist" inheritall="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<target name="jar-solrj" description="Jar Solr-J">
<ant dir="${common-solr.dir}/solrj" target="jar-core" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<!-- Solr test-framework targets -->
<target name="compile-solr-test-framework" description="Compile the Solr test-framework" unless="solr.test.framework.compiled">
<ant dir="${common-solr.dir}/test-framework" target="compile-core" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="solr.core.compiled" value="true"/>
<property name="solr.test.framework.compiled" value="true"/>
</target>
<target name="jar-solr-test-framework" depends="compile-solr-test-framework">
<ant dir="${common-solr.dir}/test-framework" target="jar-core" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<!-- resolve dependencies in the example (relied upon by compile/tests) -->
<target name="resolve-example" unless="example.libs.uptodate">
<property name="example.libs.uptodate" value="true"/>
</target>
<!-- resolve dependencies in the server directory (relied upon by compile/tests) -->
<target name="resolve-server" unless="server.libs.uptodate">
<ant dir="${common-solr.dir}/server" target="resolve" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
<property name="server.libs.uptodate" value="true"/>
</target>
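<!-- Runs the given target in every contrib module by crawling contrib/*/build.xml. -->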
<macrodef name="contrib-crawl">
<attribute name="target" default=""/>
<attribute name="failonerror" default="true"/>
<sequential>
<subant target="@{target}" failonerror="@{failonerror}" inheritall="false">
<propertyset refid="uptodate.and.compiled.properties"/>
<fileset dir="." includes="contrib/*/build.xml"/>
</subant>
</sequential>
</macrodef>
<target name="-compile-test-lucene-analysis">
<ant dir="${common.dir}/analysis" target="compile-test" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<target name="-compile-test-lucene-queryparser">
<ant dir="${common.dir}/queryparser" target="compile-test" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<target name="-compile-test-lucene-backward-codecs">
<ant dir="${common.dir}/backward-codecs" target="compile-test" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<!-- Solr contrib targets -->
<target name="-compile-analysis-extras">
<ant dir="${common-solr.dir}/contrib/analysis-extras" target="compile" inheritAll="false">
<propertyset refid="uptodate.and.compiled.properties"/>
</ant>
</target>
<target name="compile-contrib" description="Compile contrib modules">
<contrib-crawl target="compile-core"/>
</target>
<target name="compile-test-contrib" description="Compile contrib modules' tests">
<contrib-crawl target="compile-test"/>
</target>
<target name="javadocs-contrib" description="Compile contrib modules">
<contrib-crawl target="javadocs"/>
</target>
<target name="jar-contrib" description="Jar contrib modules">
<contrib-crawl target="jar-core"/>
</target>
<target name="contribs-add-to-webapp">
<mkdir dir="${dest}/web"/>
<delete dir="${dest}/web" includes="**/*" failonerror="false"/>
<contrib-crawl target="add-to-webapp"/>
</target>
<!-- Forbidden API Task, customizations for Solr -->
<target name="-check-forbidden-all" depends="-init-forbidden-apis,compile-core,compile-test">
<property prefix="ivyversions" file="${common.dir}/ivy-versions.properties"/><!-- for commons-io version -->
<forbidden-apis suppressAnnotation="**.SuppressForbidden" classpathref="forbidden-apis.allclasses.classpath" targetVersion="${javac.release}">
<signatures>
<bundled name="jdk-unsafe"/>
<bundled name="jdk-deprecated"/>
<bundled name="jdk-non-portable"/>
<bundled name="jdk-reflection"/>
<bundled name="commons-io-unsafe-${ivyversions./commons-io/commons-io}"/>
<fileset dir="${common.dir}/tools/forbiddenApis">
<include name="base.txt" />
<include name="servlet-api.txt" />
<include name="solr.txt" />
</fileset>
</signatures>
<fileset dir="${build.dir}/classes/java" excludes="${forbidden-base-excludes}"/>
<fileset dir="${build.dir}/classes/test" excludes="${forbidden-tests-excludes}" erroronmissingdir="false"/>
</forbidden-apis>
</target>
<!-- hack for now to disable *all* Solr tests on Jenkins when "tests.disable-solr" property is set -->
<target name="test" unless="tests.disable-solr">
<antcall target="common.test" inheritrefs="true" inheritall="true"/>
</target>
</project>

View File

@ -37,11 +37,3 @@ dependencies {
testImplementation project(':solr:test-framework')
}
// Add two folders to default packaging.
assemblePackaging {
from(projectDir, {
include "bin/**"
include "conf/**"
})
}