mirror of https://github.com/apache/lucene.git
SOLR-14759: a few initial changes so that Lucene can be built independently while Solr code is still in place. (#2448)
This commit is contained in:
parent
408b3775dd
commit
409bc37c13
63
build.gradle
63
build.gradle
|
@ -54,6 +54,9 @@ allprojects {
|
|||
}
|
||||
|
||||
ext {
|
||||
skipSolr = (System.getProperty("skip.solr") as Boolean)
|
||||
skipLucene = (System.getProperty("skip.lucene") as Boolean)
|
||||
|
||||
// "base" version is stripped of the qualifier. Compute it.
|
||||
baseVersion = {
|
||||
def m = (version =~ /^(\d+\.\d+\.\d+)(-(.+))?/)
|
||||
|
@ -112,6 +115,9 @@ apply from: file('gradle/generate-defaults.gradle')
|
|||
// Ant-compatibility layer: apply folder layout early so that
|
||||
// evaluation of other scripts doesn't need to be deferred.
|
||||
apply from: file('gradle/ant-compat/folder-layout.gradle')
|
||||
if (!skipSolr) {
|
||||
apply from: file('gradle/ant-compat/solr.folder-layout.gradle') // SOLR ONLY
|
||||
}
|
||||
|
||||
// Set up defaults and configure aspects for certain modules or functionality
|
||||
// (java, tests)
|
||||
|
@ -139,23 +145,31 @@ apply from: file('gradle/validation/jar-checks.gradle')
|
|||
apply from: file('gradle/validation/git-status.gradle')
|
||||
apply from: file('gradle/validation/versions-props-sorted.gradle')
|
||||
apply from: file('gradle/validation/validate-source-patterns.gradle')
|
||||
apply from: file('gradle/validation/config-file-sanity.gradle')
|
||||
apply from: file('gradle/validation/rat-sources.gradle')
|
||||
apply from: file('gradle/validation/owasp-dependency-check.gradle')
|
||||
apply from: file('gradle/validation/ecj-lint.gradle')
|
||||
apply from: file('gradle/validation/gradlew-scripts-tweaked.gradle')
|
||||
apply from: file('gradle/validation/validate-log-calls.gradle')
|
||||
apply from: file('gradle/validation/check-broken-links.gradle')
|
||||
apply from: file('gradle/validation/spotless.gradle')
|
||||
|
||||
if (!skipLucene) {
|
||||
apply from: file('gradle/validation/spotless.gradle')
|
||||
}
|
||||
|
||||
if (!skipSolr) {
|
||||
apply from: file('gradle/validation/solr.config-file-sanity.gradle') // SOLR ONLY
|
||||
}
|
||||
|
||||
// Source or data regeneration tasks
|
||||
apply from: file('gradle/generation/jflex.gradle')
|
||||
if (!skipLucene) {
|
||||
apply from: file('gradle/generation/jflex.gradle')
|
||||
apply from: file('gradle/generation/util.gradle')
|
||||
apply from: file('gradle/generation/snowball.gradle')
|
||||
apply from: file('gradle/generation/kuromoji.gradle')
|
||||
apply from: file('gradle/generation/nori.gradle')
|
||||
apply from: file('gradle/generation/icu.gradle')
|
||||
}
|
||||
apply from: file('gradle/generation/javacc.gradle')
|
||||
apply from: file('gradle/generation/util.gradle')
|
||||
apply from: file('gradle/generation/snowball.gradle')
|
||||
apply from: file('gradle/generation/kuromoji.gradle')
|
||||
apply from: file('gradle/generation/nori.gradle')
|
||||
apply from: file('gradle/generation/icu.gradle')
|
||||
|
||||
// Shared configuration of subprojects containing native code.
|
||||
apply from: file('gradle/native/disable-native.gradle')
|
||||
|
@ -172,11 +186,17 @@ apply from: file('gradle/help.gradle')
|
|||
// Ant-compatibility layer. ALL OF THESE SHOULD BE GONE at some point. They are
|
||||
// here so that we can coexist with current ant build but they are indicative
|
||||
// of potential problems with the build conventions, dependencies, etc.
|
||||
apply from: file('gradle/ant-compat/force-versions.gradle')
|
||||
apply from: file('gradle/ant-compat/misc.gradle')
|
||||
apply from: file('gradle/ant-compat/post-jar.gradle')
|
||||
apply from: file('gradle/ant-compat/test-classes-cross-deps.gradle')
|
||||
apply from: file('gradle/ant-compat/artifact-naming.gradle')
|
||||
|
||||
if (!skipLucene) {
|
||||
apply from: file('gradle/ant-compat/test-classes-cross-deps.gradle')
|
||||
}
|
||||
|
||||
if (!skipSolr) {
|
||||
apply from: file('gradle/ant-compat/force-versions.gradle') // SOLR ONLY
|
||||
apply from: file('gradle/ant-compat/solr.test-classes-cross-deps.gradle') // SOLR ONLY
|
||||
apply from: file('gradle/ant-compat/artifact-naming.gradle') // SOLR ONLY
|
||||
}
|
||||
|
||||
apply from: file('gradle/documentation/documentation.gradle')
|
||||
apply from: file('gradle/documentation/changes-to-html.gradle')
|
||||
|
@ -184,9 +204,22 @@ apply from: file('gradle/documentation/markdown.gradle')
|
|||
apply from: file('gradle/documentation/render-javadoc.gradle')
|
||||
|
||||
apply from: file('gradle/hacks/gradle-archives.gradle')
|
||||
apply from: file('gradle/hacks/findbugs.gradle')
|
||||
if (!skipSolr) {
|
||||
apply from: file('gradle/hacks/solr.findbugs.gradle') // SOLR ONLY
|
||||
}
|
||||
apply from: file('gradle/hacks/gradle.gradle')
|
||||
apply from: file('gradle/hacks/hashmapAssertions.gradle')
|
||||
|
||||
apply from: file('gradle/solr/packaging.gradle')
|
||||
apply from: file('gradle/solr/solr-forbidden-apis.gradle')
|
||||
if (skipLucene) {
|
||||
apply from: file('gradle/solr-tlp-migration/inaccessible-test-sources.gradle')
|
||||
}
|
||||
apply from: file('gradle/solr-tlp-migration/temp-disable-constraint-checking.gradle')
|
||||
|
||||
if (!skipSolr) {
|
||||
apply from: file('gradle/solr/packaging.gradle') // SOLR ONLY
|
||||
apply from: file('gradle/solr/solr-forbidden-apis.gradle') // SOLR ONLY
|
||||
}
|
||||
|
||||
if (!skipSolr) {
|
||||
apply from: file('gradle/ant-compat/solr.post-jar.gradle') // SOLR ONLY
|
||||
}
|
||||
|
|
|
@ -194,6 +194,9 @@ def checkAll(dirName):
|
|||
elif link == 'http://lucene.apache.org/core/':
|
||||
# OK
|
||||
pass
|
||||
elif re.match("^https?://lucene.apache.org/core/[^/]+/index.html", link):
|
||||
# OK
|
||||
pass
|
||||
elif link == 'http://lucene.apache.org/solr/':
|
||||
# OK
|
||||
pass
|
||||
|
|
|
@ -35,13 +35,6 @@ allprojects {
|
|||
}
|
||||
}
|
||||
|
||||
// Adapt to custom 'web' folder location.
|
||||
configure(project(":solr:webapp")) {
|
||||
plugins.withType(WarPlugin) {
|
||||
webAppDirName = "web"
|
||||
}
|
||||
}
|
||||
|
||||
allprojects {
|
||||
plugins.withType(JavaPlugin) {
|
||||
// if 'src/tools' exists, add it as a separate sourceSet.
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
|
||||
// Force versions of certain components to align them with ant build.
|
||||
|
||||
subprojects {
|
||||
project(":solr").subprojects {
|
||||
plugins.withType(JavaPlugin) {
|
||||
dependencies {
|
||||
implementation enforcedPlatform('org.slf4j:slf4j-api:1.7.24')
|
||||
|
|
|
@ -23,25 +23,25 @@ allprojects {
|
|||
}
|
||||
}
|
||||
|
||||
// Exclude test classes that are not actually stand-alone tests (they're executed from other stuff).
|
||||
configure(project(":lucene:replicator")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
test {
|
||||
exclude "**/SimpleServer*"
|
||||
if (!skipLucene) {
|
||||
// Exclude test classes that are not actually stand-alone tests (they're executed from other stuff).
|
||||
configure(project(":lucene:replicator")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
test {
|
||||
exclude "**/SimpleServer*"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Resources from top-level project folder are looked up via getClass(). Strange.
|
||||
configure(project(":lucene:benchmark")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
task syncConf(type: Sync) {
|
||||
from('conf')
|
||||
into file("${sourceSets.test.java.outputDir}/conf")
|
||||
// Resources from top-level project folder are looked up via getClass(). Strange.
|
||||
configure(project(":lucene:benchmark")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
task syncConf(type: Sync) {
|
||||
from('conf')
|
||||
into file("${sourceSets.test.java.outputDir}/conf")
|
||||
}
|
||||
processTestResources.dependsOn syncConf
|
||||
}
|
||||
processTestResources.dependsOn syncConf
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Adapt to custom 'web' folder location.
|
||||
configure(project(":solr:webapp")) {
|
||||
plugins.withType(WarPlugin) {
|
||||
webAppDirName = "web"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Set up cross-project dependency on test classes. This should be resolved by pulling reused classes into
|
||||
// a separate regular module. Exporting test classes is sort of weird.
|
||||
if (!skipLucene) {
|
||||
configure(project(":solr:core")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
dependencies {
|
||||
testImplementation project(path: ':lucene:backward-codecs', configuration: 'testClassesExported')
|
||||
testImplementation project(path: ':lucene:queryparser', configuration: 'testClassesExported')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:contrib:analysis-extras")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
dependencies {
|
||||
testImplementation project(path: ':lucene:analysis:common', configuration: 'testClassesExported')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -41,20 +41,3 @@ configure(project(":lucene:spatial-extras")) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:core")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
dependencies {
|
||||
testImplementation project(path: ':lucene:backward-codecs', configuration: 'testClassesExported')
|
||||
testImplementation project(path: ':lucene:queryparser', configuration: 'testClassesExported')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:contrib:analysis-extras")) {
|
||||
plugins.withType(JavaPlugin) {
|
||||
dependencies {
|
||||
testImplementation project(path: ':lucene:analysis:common', configuration: 'testClassesExported')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,6 +23,13 @@ allprojects {
|
|||
// Repositories to fetch dependencies from.
|
||||
repositories {
|
||||
mavenCentral()
|
||||
|
||||
if (System.getProperty("skip.lucene") as Boolean) {
|
||||
maven {
|
||||
name "ApacheSnapshots"
|
||||
url 'https://repository.apache.org/content/repositories/snapshots/'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Artifacts will have names after full gradle project path
|
||||
|
|
|
@ -47,7 +47,7 @@ class ChangesToHtmlTask extends DefaultTask {
|
|||
.fileProvider(project.providers.provider { project.file("${project.docroot}/changes") })
|
||||
|
||||
@Input
|
||||
def luceneDocUrl = "${->project.luceneDocUrl}"
|
||||
def luceneDocUrl = "${-> project.luceneDocUrl }"
|
||||
|
||||
@InputFile
|
||||
def script
|
||||
|
|
|
@ -20,36 +20,70 @@ configure(rootProject) {
|
|||
def refguideUrlVersion = project.baseVersion.replaceFirst(/^(\d+)\.(\d+).*$/, '$1_$2')
|
||||
|
||||
ext {
|
||||
luceneDocUrl = project.propertyOrDefault('lucene.javadoc.url', {
|
||||
if (project.version != project.baseVersion) {
|
||||
// non-release build
|
||||
new File(project('lucene:documentation').buildDir, 'site').toURI().toASCIIString().minus(~'/$')
|
||||
} else {
|
||||
// release build
|
||||
"https://lucene.apache.org/core/${urlVersion}"
|
||||
}
|
||||
}())
|
||||
if (!skipLucene) {
|
||||
luceneDocUrl = project.propertyOrDefault('lucene.javadoc.url', {
|
||||
if (project.version != project.baseVersion) {
|
||||
// non-release build
|
||||
new File(project('lucene:documentation').buildDir, 'site').toURI().toASCIIString().minus(~'/$')
|
||||
} else {
|
||||
// release build
|
||||
"https://lucene.apache.org/core/${urlVersion}"
|
||||
}
|
||||
}())
|
||||
} else {
|
||||
// TODO: The url should be constructed from actual dependency version... but this isn't available
|
||||
// at evaluation time. it'd have to be a lazy property provider.
|
||||
//
|
||||
// There are actually two questions -
|
||||
// 1) what to do about links to yet-unreleased Lucene versions and
|
||||
// 2) forgot what the second question was...
|
||||
//
|
||||
// For now, just hardcode the latest released version.
|
||||
// We could download Lucene javadoc artifacts for the actual dependency and just
|
||||
// link it locally (and publish it on Solr site). This would ensure the docs are always up-to-date,
|
||||
// even for Lucene snapshots.
|
||||
luceneDocVersion = "9_0_0-SNAPSHOT"
|
||||
luceneDocUrl = project.propertyOrDefault('lucene.javadoc.url', "https://lucene.apache.org/core/${luceneDocVersion}")
|
||||
}
|
||||
|
||||
solrDocUrl = project.propertyOrDefault('solr.javadoc.url', {
|
||||
if (project.version != project.baseVersion) {
|
||||
// non-release build
|
||||
new File(project('solr:documentation').buildDir, 'site').toURI().toASCIIString().minus(~'/$')
|
||||
} else {
|
||||
// release build
|
||||
"https://lucene.apache.org/solr/${urlVersion}"
|
||||
}
|
||||
}())
|
||||
// SOLR ONLY
|
||||
if (!skipSolr) {
|
||||
solrDocUrl = project.propertyOrDefault('solr.javadoc.url', {
|
||||
if (project.version != project.baseVersion) {
|
||||
// non-release build
|
||||
new File(project('solr:documentation').buildDir, 'site').toURI().toASCIIString().minus(~'/$')
|
||||
} else {
|
||||
// release build
|
||||
"https://lucene.apache.org/solr/${urlVersion}"
|
||||
}
|
||||
}())
|
||||
|
||||
solrRefguideUrl = project.propertyOrDefault('solr.refguide.url', "https://lucene.apache.org/solr/guide/${refguideUrlVersion}")
|
||||
solrRefguideUrl = project.propertyOrDefault('solr.refguide.url', "https://lucene.apache.org/solr/guide/${refguideUrlVersion}")
|
||||
}
|
||||
}
|
||||
|
||||
task documentation() {
|
||||
group = 'documentation'
|
||||
description = 'Generate all documentation'
|
||||
|
||||
dependsOn ':lucene:documentation:assemble'
|
||||
dependsOn ':solr:documentation:assemble'
|
||||
if (!skipLucene) {
|
||||
dependsOn ':lucene:documentation:assemble'
|
||||
}
|
||||
if (!skipSolr) {
|
||||
dependsOn ':solr:documentation:assemble'
|
||||
|
||||
doFirst {
|
||||
// Sanity check doc. version vs. actual version.
|
||||
def actualLuceneVersion = getVersion("org.apache.lucene", "lucene-core").replace('.', '_')
|
||||
if (luceneDocVersion != actualLuceneVersion) {
|
||||
throw new GradleException("Compiling documentation with Lucene version ${luceneDocVersion} which is " +
|
||||
"different from actual dependency ${actualLuceneVersion}?")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assemble.dependsOn documentation
|
||||
}
|
||||
|
||||
// docroot will point to Lucene and Solr relative directory for each sub-project.
|
||||
|
@ -93,51 +127,49 @@ configure(subprojects.findAll { it.path == ':lucene:documentation' || it.path ==
|
|||
builtBy documentation
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
configure(project(':solr:documentation')) {
|
||||
// SOLR ONLY
|
||||
if (!skipSolr) {
|
||||
configure(project(':solr:documentation')) {
|
||||
task documentationMinimal() {
|
||||
group = 'documentation'
|
||||
description = "Generate stub Solr documentation pointing to web page (that's part of Solr TGZ)"
|
||||
dependsOn 'copyMiniDocumentationAssets', 'createMiniDocumentationIndex', 'copyChangesToHtmlForMiniSite'
|
||||
}
|
||||
|
||||
task documentationMinimal() {
|
||||
group = 'documentation'
|
||||
description = "Generate stub Solr documentation pointing to web page (that's part of Solr TGZ)"
|
||||
dependsOn 'copyMiniDocumentationAssets','createMiniDocumentationIndex','copyChangesToHtmlForMiniSite'
|
||||
}
|
||||
task copyChangesToHtmlForMiniSite(type: Copy) {
|
||||
from project.docroot
|
||||
into project.docrootMinimal
|
||||
include 'changes/**'
|
||||
dependsOn 'changesToHtml'
|
||||
}
|
||||
|
||||
task copyChangesToHtmlForMiniSite(type: Copy) {
|
||||
from project.docroot
|
||||
into project.docrootMinimal
|
||||
include 'changes/**'
|
||||
dependsOn 'changesToHtml'
|
||||
}
|
||||
task copyMiniDocumentationAssets(type: Copy) {
|
||||
includeEmptyDirs = false
|
||||
from('src/assets')
|
||||
into project.docrootMinimal
|
||||
}
|
||||
|
||||
task copyMiniDocumentationAssets(type: Copy) {
|
||||
includeEmptyDirs = false
|
||||
from('src/assets')
|
||||
into project.docrootMinimal
|
||||
}
|
||||
assemble {
|
||||
dependsOn documentationMinimal
|
||||
}
|
||||
|
||||
assemble {
|
||||
dependsOn documentationMinimal
|
||||
}
|
||||
configurations {
|
||||
minimalSite
|
||||
}
|
||||
|
||||
configurations {
|
||||
minimalSite
|
||||
}
|
||||
|
||||
artifacts {
|
||||
minimalSite project.docrootMinimal, {
|
||||
builtBy documentationMinimal
|
||||
artifacts {
|
||||
minimalSite project.docrootMinimal, {
|
||||
builtBy documentationMinimal
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
configure(subprojects.findAll { it.path == ':lucene' || it.path == ':solr' }) {
|
||||
|
||||
ext {
|
||||
docroot = project('documentation').docroot
|
||||
docrootMinimal = project('documentation').docrootMinimal
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -52,70 +52,75 @@ configure(subprojects.findAll { it.path == ':lucene:documentation' || it.path ==
|
|||
}
|
||||
}
|
||||
|
||||
configure(project(':lucene:documentation')) {
|
||||
markdownToHtml {
|
||||
from(project.parent.projectDir) {
|
||||
include 'MIGRATE.md'
|
||||
include 'JRE_VERSION_MIGRATION.md'
|
||||
include 'SYSTEM_REQUIREMENTS.md'
|
||||
}
|
||||
}
|
||||
|
||||
task createDocumentationIndex(type: MarkdownTemplateTask) {
|
||||
dependsOn markdownToHtml
|
||||
|
||||
outputFile = file("${project.docroot}/index.html")
|
||||
templateFile = file("${project.markdownSrc}/index.template.md")
|
||||
|
||||
def defaultCodecFile = project(':lucene:core').file('src/java/org/apache/lucene/codecs/Codec.java')
|
||||
inputs.file(defaultCodecFile)
|
||||
|
||||
// list all properties used by the template here to allow uptodate checks to be correct:
|
||||
inputs.property('version', project.version)
|
||||
|
||||
binding.put('defaultCodecPackage', providers.provider{
|
||||
// static Codec defaultCodec = LOADER . lookup ( "LuceneXXX" ) ;
|
||||
def regex = ~/\bdefaultCodec\s*=\s*LOADER\s*\.\s*lookup\s*\(\s*"([^"]+)"\s*\)\s*;/
|
||||
def matcher = regex.matcher(defaultCodecFile.getText('UTF-8'))
|
||||
if (!matcher.find()) {
|
||||
throw GradleException("Cannot determine default codec from file ${defaultCodecFile}")
|
||||
if (!skipLucene) {
|
||||
configure(project(':lucene:documentation')) {
|
||||
markdownToHtml {
|
||||
from(project.parent.projectDir) {
|
||||
include 'MIGRATE.md'
|
||||
include 'JRE_VERSION_MIGRATION.md'
|
||||
include 'SYSTEM_REQUIREMENTS.md'
|
||||
}
|
||||
return matcher.group(1).toLowerCase(Locale.ROOT)
|
||||
})
|
||||
|
||||
withProjectList()
|
||||
}
|
||||
|
||||
task createDocumentationIndex(type: MarkdownTemplateTask) {
|
||||
dependsOn markdownToHtml
|
||||
|
||||
outputFile = file("${project.docroot}/index.html")
|
||||
templateFile = file("${project.markdownSrc}/index.template.md")
|
||||
|
||||
def defaultCodecFile = project(':lucene:core').file('src/java/org/apache/lucene/codecs/Codec.java')
|
||||
inputs.file(defaultCodecFile)
|
||||
|
||||
// list all properties used by the template here to allow uptodate checks to be correct:
|
||||
inputs.property('version', project.version)
|
||||
|
||||
binding.put('defaultCodecPackage', providers.provider {
|
||||
// static Codec defaultCodec = LOADER . lookup ( "LuceneXXX" ) ;
|
||||
def regex = ~/\bdefaultCodec\s*=\s*LOADER\s*\.\s*lookup\s*\(\s*"([^"]+)"\s*\)\s*;/
|
||||
def matcher = regex.matcher(defaultCodecFile.getText('UTF-8'))
|
||||
if (!matcher.find()) {
|
||||
throw GradleException("Cannot determine default codec from file ${defaultCodecFile}")
|
||||
}
|
||||
return matcher.group(1).toLowerCase(Locale.ROOT)
|
||||
})
|
||||
|
||||
withProjectList()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(':solr:documentation')) {
|
||||
markdownToHtml {
|
||||
from(project.markdownSrc) {
|
||||
include '**/*.md'
|
||||
exclude '**/*.template.md'
|
||||
// SOLR ONLY
|
||||
if (!skipSolr) {
|
||||
configure(project(':solr:documentation')) {
|
||||
markdownToHtml {
|
||||
from(project.markdownSrc) {
|
||||
include '**/*.md'
|
||||
exclude '**/*.template.md'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task createDocumentationIndex(type: MarkdownTemplateTask) {
|
||||
dependsOn markdownToHtml
|
||||
|
||||
outputFile = file("${project.docroot}/index.html")
|
||||
templateFile = file("${project.markdownSrc}/index.template.md")
|
||||
|
||||
// list all properties used by the template here to allow uptodate checks to be correct:
|
||||
inputs.property('version', project.version)
|
||||
inputs.property('solrRefguideUrl', project.solrRefguideUrl)
|
||||
inputs.property('luceneDocUrl', project.luceneDocUrl)
|
||||
|
||||
withProjectList()
|
||||
}
|
||||
|
||||
task createMiniDocumentationIndex(type: MarkdownTemplateTask) {
|
||||
outputFile = file("${project.docrootMinimal}/index.html")
|
||||
templateFile = file("${project.markdownSrc}/online-link.template.md")
|
||||
|
||||
// list all properties used by the template here to allow uptodate checks to be correct:
|
||||
inputs.property('version', project.version)
|
||||
inputs.property('solrDocUrl', project.solrDocUrl)
|
||||
task createDocumentationIndex(type: MarkdownTemplateTask) {
|
||||
dependsOn markdownToHtml
|
||||
|
||||
outputFile = file("${project.docroot}/index.html")
|
||||
templateFile = file("${project.markdownSrc}/index.template.md")
|
||||
|
||||
// list all properties used by the template here to allow uptodate checks to be correct:
|
||||
inputs.property('version', project.version)
|
||||
inputs.property('solrRefguideUrl', project.solrRefguideUrl)
|
||||
inputs.property('luceneDocUrl', project.luceneDocUrl)
|
||||
|
||||
withProjectList()
|
||||
}
|
||||
|
||||
task createMiniDocumentationIndex(type: MarkdownTemplateTask) {
|
||||
outputFile = file("${project.docrootMinimal}/index.html")
|
||||
templateFile = file("${project.markdownSrc}/online-link.template.md")
|
||||
|
||||
// list all properties used by the template here to allow uptodate checks to be correct:
|
||||
inputs.property('version', project.version)
|
||||
inputs.property('solrDocUrl', project.solrDocUrl)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -96,181 +96,184 @@ allprojects {
|
|||
}
|
||||
|
||||
// Configure project-specific tweaks and to-dos.
|
||||
|
||||
configure(project(":lucene:analysis:common")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
if (!skipLucene) {
|
||||
configure(project(":lucene:analysis:common")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure([
|
||||
project(":lucene:analysis:kuromoji"),
|
||||
project(":lucene:analysis:nori"),
|
||||
project(":lucene:analysis:opennlp"),
|
||||
project(":lucene:analysis:smartcn"),
|
||||
project(":lucene:benchmark"),
|
||||
project(":lucene:codecs"),
|
||||
project(":lucene:grouping"),
|
||||
project(":lucene:highlighter"),
|
||||
project(":lucene:luke"),
|
||||
project(":lucene:monitor"),
|
||||
project(":lucene:queries"),
|
||||
project(":lucene:queryparser"),
|
||||
project(":lucene:replicator"),
|
||||
project(":lucene:spatial-extras"),
|
||||
configure([
|
||||
project(":lucene:analysis:kuromoji"),
|
||||
project(":lucene:analysis:nori"),
|
||||
project(":lucene:analysis:opennlp"),
|
||||
project(":lucene:analysis:smartcn"),
|
||||
project(":lucene:benchmark"),
|
||||
project(":lucene:codecs"),
|
||||
project(":lucene:grouping"),
|
||||
project(":lucene:highlighter"),
|
||||
project(":lucene:luke"),
|
||||
project(":lucene:monitor"),
|
||||
project(":lucene:queries"),
|
||||
project(":lucene:queryparser"),
|
||||
project(":lucene:replicator"),
|
||||
project(":lucene:spatial-extras"),
|
||||
]) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure([
|
||||
project(":lucene:analysis:icu"),
|
||||
project(":lucene:analysis:morfologik"),
|
||||
project(":lucene:analysis:phonetic"),
|
||||
project(":lucene:analysis:stempel"),
|
||||
project(":lucene:classification"),
|
||||
project(":lucene:demo"),
|
||||
project(":lucene:expressions"),
|
||||
project(":lucene:facet"),
|
||||
project(":lucene:join"),
|
||||
project(":lucene:spatial3d"),
|
||||
project(":lucene:suggest"),
|
||||
configure([
|
||||
project(":lucene:analysis:icu"),
|
||||
project(":lucene:analysis:morfologik"),
|
||||
project(":lucene:analysis:phonetic"),
|
||||
project(":lucene:analysis:stempel"),
|
||||
project(":lucene:classification"),
|
||||
project(":lucene:demo"),
|
||||
project(":lucene:expressions"),
|
||||
project(":lucene:facet"),
|
||||
project(":lucene:join"),
|
||||
project(":lucene:spatial3d"),
|
||||
project(":lucene:suggest"),
|
||||
]) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing @param tags
|
||||
javadocMissingLevel = "method"
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing @param tags
|
||||
javadocMissingLevel = "method"
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:backward-codecs")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing @param tags
|
||||
javadocMissingLevel = "method"
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:test-framework")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.lucene.analysis",
|
||||
"org.apache.lucene.analysis.standard",
|
||||
"org.apache.lucene.codecs",
|
||||
"org.apache.lucene.codecs.blockterms",
|
||||
"org.apache.lucene.codecs.bloom",
|
||||
"org.apache.lucene.codecs.compressing",
|
||||
"org.apache.lucene.codecs.uniformsplit",
|
||||
"org.apache.lucene.codecs.uniformsplit.sharedterms",
|
||||
"org.apache.lucene.geo",
|
||||
"org.apache.lucene.index",
|
||||
"org.apache.lucene.search",
|
||||
"org.apache.lucene.search.similarities",
|
||||
"org.apache.lucene.search.spans",
|
||||
"org.apache.lucene.store",
|
||||
"org.apache.lucene.util",
|
||||
"org.apache.lucene.util.automaton",
|
||||
"org.apache.lucene.util.fst"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:sandbox")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:misc")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:core")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
// some packages are fixed already
|
||||
javadocMissingMethod = [
|
||||
"org.apache.lucene.util.automaton",
|
||||
"org.apache.lucene.analysis.standard",
|
||||
"org.apache.lucene.analysis.tokenattributes",
|
||||
"org.apache.lucene.document",
|
||||
"org.apache.lucene.search.similarities",
|
||||
"org.apache.lucene.index",
|
||||
"org.apache.lucene.codecs",
|
||||
"org.apache.lucene.codecs.lucene50",
|
||||
"org.apache.lucene.codecs.lucene60",
|
||||
"org.apache.lucene.codecs.lucene80",
|
||||
"org.apache.lucene.codecs.lucene84",
|
||||
"org.apache.lucene.codecs.lucene86",
|
||||
"org.apache.lucene.codecs.lucene87",
|
||||
"org.apache.lucene.codecs.perfield"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:backward-codecs")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing @param tags
|
||||
javadocMissingLevel = "method"
|
||||
if (!skipSolr) {
|
||||
configure(project(":solr").allprojects) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "package"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:test-framework")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.lucene.analysis",
|
||||
"org.apache.lucene.analysis.standard",
|
||||
"org.apache.lucene.codecs",
|
||||
"org.apache.lucene.codecs.blockterms",
|
||||
"org.apache.lucene.codecs.bloom",
|
||||
"org.apache.lucene.codecs.compressing",
|
||||
"org.apache.lucene.codecs.uniformsplit",
|
||||
"org.apache.lucene.codecs.uniformsplit.sharedterms",
|
||||
"org.apache.lucene.geo",
|
||||
"org.apache.lucene.index",
|
||||
"org.apache.lucene.search",
|
||||
"org.apache.lucene.search.similarities",
|
||||
"org.apache.lucene.search.spans",
|
||||
"org.apache.lucene.store",
|
||||
"org.apache.lucene.util",
|
||||
"org.apache.lucene.util.automaton",
|
||||
"org.apache.lucene.util.fst"
|
||||
]
|
||||
configure(project(":solr:contrib:analysis-extras")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.solr.schema",
|
||||
"org.apache.solr.update.processor"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:sandbox")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
configure(project(":solr:contrib:analytics")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.solr.handler",
|
||||
"org.apache.solr.handler.component",
|
||||
"org.apache.solr.response"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:misc")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
configure(project(":solr:contrib:langid")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = ["org.apache.solr.update.processor"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:core")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "class"
|
||||
// some packages are fixed already
|
||||
javadocMissingMethod = [
|
||||
"org.apache.lucene.util.automaton",
|
||||
"org.apache.lucene.analysis.standard",
|
||||
"org.apache.lucene.analysis.tokenattributes",
|
||||
"org.apache.lucene.document",
|
||||
"org.apache.lucene.search.similarities",
|
||||
"org.apache.lucene.index",
|
||||
"org.apache.lucene.codecs",
|
||||
"org.apache.lucene.codecs.lucene50",
|
||||
"org.apache.lucene.codecs.lucene60",
|
||||
"org.apache.lucene.codecs.lucene80",
|
||||
"org.apache.lucene.codecs.lucene84",
|
||||
"org.apache.lucene.codecs.lucene86",
|
||||
"org.apache.lucene.codecs.lucene87",
|
||||
"org.apache.lucene.codecs.perfield"
|
||||
]
|
||||
configure(project(":solr:solrj")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = ["org.apache.solr.client.solrj.embedded"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr").allprojects) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: fix missing javadocs
|
||||
javadocMissingLevel = "package"
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:contrib:analysis-extras")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.solr.schema",
|
||||
"org.apache.solr.update.processor"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:contrib:analytics")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.solr.handler",
|
||||
"org.apache.solr.handler.component",
|
||||
"org.apache.solr.response"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:contrib:langid")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [ "org.apache.solr.update.processor" ]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:solrj")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [ "org.apache.solr.client.solrj.embedded" ]
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:test-framework")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.solr",
|
||||
"org.apache.solr.analysis",
|
||||
"org.apache.solr.cloud",
|
||||
"org.apache.solr.core",
|
||||
"org.apache.solr.handler.component",
|
||||
"org.apache.solr.update.processor",
|
||||
"org.apache.solr.util"
|
||||
]
|
||||
configure(project(":solr:test-framework")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// TODO: clean up split packages
|
||||
javadocMissingIgnore = [
|
||||
"org.apache.solr",
|
||||
"org.apache.solr.analysis",
|
||||
"org.apache.solr.cloud",
|
||||
"org.apache.solr.core",
|
||||
"org.apache.solr.handler.component",
|
||||
"org.apache.solr.update.processor",
|
||||
"org.apache.solr.util"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -278,32 +281,38 @@ configure(project(":solr:test-framework")) {
|
|||
// (by removing all the packages which are part of lucene-core)
|
||||
// See: https://issues.apache.org/jira/browse/LUCENE-8738?focusedCommentId=16818106&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-16818106
|
||||
// LUCENE-9499: This workaround should be applied only to test-framework (we have no split package in other modules).
|
||||
configure(project(":lucene:test-framework")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
doLast {
|
||||
Set luceneCorePackages = file("${project(':lucene:core').tasks[name].outputDir}/element-list").readLines('UTF-8').toSet();
|
||||
File elementFile = file("${outputDir}/element-list");
|
||||
List elements = elementFile.readLines('UTF-8');
|
||||
elements.removeAll(luceneCorePackages)
|
||||
elementFile.write(elements.join('\n').concat('\n'), 'UTF-8');
|
||||
if (!skipLucene) {
|
||||
configure(project(":lucene:test-framework")) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
doLast {
|
||||
Set luceneCorePackages = file("${project(':lucene:core').tasks[name].outputDir}/element-list").readLines('UTF-8').toSet();
|
||||
File elementFile = file("${outputDir}/element-list");
|
||||
List elements = elementFile.readLines('UTF-8');
|
||||
elements.removeAll(luceneCorePackages)
|
||||
elementFile.write(elements.join('\n').concat('\n'), 'UTF-8');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(':lucene:demo')) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// For the demo, we link the example source in the javadocs, as it's ref'ed elsewhere
|
||||
linksource = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(':lucene:demo')) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
// For the demo, we link the example source in the javadocs, as it's ref'ed elsewhere
|
||||
linksource = true
|
||||
}
|
||||
}
|
||||
|
||||
// Disable Javadoc rendering for these projects.
|
||||
configure(subprojects.findAll { it.path in [
|
||||
':solr:solr-ref-guide',
|
||||
':solr:server',
|
||||
':solr:webapp']}) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
enabled = false
|
||||
if (!skipSolr) {
|
||||
configure(subprojects.findAll {
|
||||
it.path in [
|
||||
':solr:solr-ref-guide',
|
||||
':solr:server',
|
||||
':solr:webapp']
|
||||
}) {
|
||||
project.tasks.withType(RenderJavadocTask) {
|
||||
enabled = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -348,10 +357,11 @@ class RenderJavadocTask extends DefaultTask {
|
|||
def offlineLinks = [:]
|
||||
|
||||
@Input
|
||||
def luceneDocUrl = "${->project.luceneDocUrl}"
|
||||
def luceneDocUrl = "${-> project.luceneDocUrl }"
|
||||
|
||||
// SOLR ONLY
|
||||
@Input
|
||||
def solrDocUrl = "${->project.solrDocUrl}"
|
||||
def solrDocUrl = "${-> project.skipSolr ? null : project.solrDocUrl }"
|
||||
|
||||
// default is to require full javadocs
|
||||
@Input
|
||||
|
@ -446,22 +456,22 @@ class RenderJavadocTask extends DefaultTask {
|
|||
// lucene-test-framework was first, or broken links to things like LuceneTestCase if lucene-core was first)
|
||||
if (project.path != ':solr:test-framework') { //
|
||||
findRenderTasksInDependencies()
|
||||
.sort(false, Comparator.comparing { (it.project.name != 'core') as Boolean }.thenComparing(Comparator.comparing { it.path }))
|
||||
.each { otherTask ->
|
||||
def otherProject = otherTask.project
|
||||
// For relative links we compute the actual relative link between projects.
|
||||
def crossLuceneSolr = (otherProject.docroot != project.docroot)
|
||||
if (relativeProjectLinks && !crossLuceneSolr) {
|
||||
def pathTo = otherTask.outputDir.toPath().toAbsolutePath()
|
||||
def pathFrom = outputDir.toPath().toAbsolutePath()
|
||||
def relative = pathFrom.relativize(pathTo).toString().replace(File.separator, '/')
|
||||
opts << ['-link', relative]
|
||||
} else {
|
||||
// For absolute links, we determine the target URL by assembling the full URL.
|
||||
def base = otherProject.path.startsWith(":lucene") ? luceneDocUrl : solrDocUrl
|
||||
allOfflineLinks.put("${base}/${otherProject.relativeDocPath}/".toString(), otherTask.outputDir)
|
||||
.sort(false, Comparator.comparing { (it.project.name != 'core') as Boolean }.thenComparing(Comparator.comparing { it.path }))
|
||||
.each { otherTask ->
|
||||
def otherProject = otherTask.project
|
||||
// For relative links we compute the actual relative link between projects.
|
||||
def crossLuceneSolr = (otherProject.docroot != project.docroot)
|
||||
if (relativeProjectLinks && !crossLuceneSolr) {
|
||||
def pathTo = otherTask.outputDir.toPath().toAbsolutePath()
|
||||
def pathFrom = outputDir.toPath().toAbsolutePath()
|
||||
def relative = pathFrom.relativize(pathTo).toString().replace(File.separator, '/')
|
||||
opts << ['-link', relative]
|
||||
} else {
|
||||
// For absolute links, we determine the target URL by assembling the full URL.
|
||||
def base = otherProject.path.startsWith(":lucene") ? luceneDocUrl : solrDocUrl
|
||||
allOfflineLinks.put("${base}/${otherProject.relativeDocPath}/".toString(), otherTask.outputDir)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add offline links.
|
||||
|
|
|
@ -100,186 +100,189 @@ def commonCleanups = { FileTree generatedFiles ->
|
|||
}
|
||||
}
|
||||
|
||||
configure(project(":lucene:queryparser")) {
|
||||
task javaccParserClassic(type: JavaCCTask) {
|
||||
description "Regenerate classic query parser from lucene/queryparser/classic/QueryParser.jj"
|
||||
group "generation"
|
||||
if (!skipLucene) {
|
||||
configure(project(":lucene:queryparser")) {
|
||||
task javaccParserClassic(type: JavaCCTask) {
|
||||
description "Regenerate classic query parser from lucene/queryparser/classic/QueryParser.jj"
|
||||
group "generation"
|
||||
|
||||
javaccFile = file('src/java/org/apache/lucene/queryparser/classic/QueryParser.jj')
|
||||
javaccFile = file('src/java/org/apache/lucene/queryparser/classic/QueryParser.jj')
|
||||
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "QueryParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
text = text.replace(
|
||||
"public QueryParser(CharStream ",
|
||||
"protected QueryParser(CharStream ")
|
||||
text = text.replace(
|
||||
"public QueryParser(QueryParserTokenManager ",
|
||||
"protected QueryParser(QueryParserTokenManager ")
|
||||
text = text.replace(
|
||||
"new java.util.ArrayList<int[]>",
|
||||
"new java.util.ArrayList<>")
|
||||
text = text.replace(
|
||||
"final private LookaheadSuccess jj_ls =",
|
||||
"static final private LookaheadSuccess jj_ls =")
|
||||
return text
|
||||
})
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "QueryParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
text = text.replace(
|
||||
"public QueryParser(CharStream ",
|
||||
"protected QueryParser(CharStream ")
|
||||
text = text.replace(
|
||||
"public QueryParser(QueryParserTokenManager ",
|
||||
"protected QueryParser(QueryParserTokenManager ")
|
||||
text = text.replace(
|
||||
"new java.util.ArrayList<int[]>",
|
||||
"new java.util.ArrayList<>")
|
||||
text = text.replace(
|
||||
"final private LookaheadSuccess jj_ls =",
|
||||
"static final private LookaheadSuccess jj_ls =")
|
||||
return text
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task javaccParserSurround(type: JavaCCTask) {
|
||||
description "Regenerate surround query parser from lucene/queryparser/surround/parser/QueryParser.jj"
|
||||
group "generation"
|
||||
task javaccParserSurround(type: JavaCCTask) {
|
||||
description "Regenerate surround query parser from lucene/queryparser/surround/parser/QueryParser.jj"
|
||||
group "generation"
|
||||
|
||||
javaccFile = file('src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj')
|
||||
javaccFile = file('src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.jj')
|
||||
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "QueryParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
text = text.replace(
|
||||
"import org.apache.lucene.analysis.TokenStream;",
|
||||
"")
|
||||
text = text.replace(
|
||||
"new java.util.ArrayList<int[]>",
|
||||
"new java.util.ArrayList<>")
|
||||
return text
|
||||
})
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "QueryParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
text = text.replace(
|
||||
"import org.apache.lucene.analysis.TokenStream;",
|
||||
"")
|
||||
text = text.replace(
|
||||
"new java.util.ArrayList<int[]>",
|
||||
"new java.util.ArrayList<>")
|
||||
return text
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task javaccParserFlexible(type: JavaCCTask) {
|
||||
description "Regenerate Flexible query parser from queryparser/flexible/standard/parser/StandardSyntaxParser.jj"
|
||||
group "generation"
|
||||
task javaccParserFlexible(type: JavaCCTask) {
|
||||
description "Regenerate Flexible query parser from queryparser/flexible/standard/parser/StandardSyntaxParser.jj"
|
||||
group "generation"
|
||||
|
||||
javaccFile = file('src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj')
|
||||
javaccFile = file('src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.jj')
|
||||
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "ParseException.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
// Modify constructor.
|
||||
text = text.replace(
|
||||
"class ParseException extends Exception",
|
||||
"class ParseException extends QueryNodeParseException")
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "ParseException.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
// Modify constructor.
|
||||
text = text.replace(
|
||||
"class ParseException extends Exception",
|
||||
"class ParseException extends QueryNodeParseException")
|
||||
|
||||
// Modify imports.
|
||||
text = text.replace(
|
||||
"package org.apache.lucene.queryparser.flexible.standard.parser;", '''\
|
||||
package org.apache.lucene.queryparser.flexible.standard.parser;
|
||||
// Modify imports.
|
||||
text = text.replace(
|
||||
"package org.apache.lucene.queryparser.flexible.standard.parser;", '''\
|
||||
package org.apache.lucene.queryparser.flexible.standard.parser;
|
||||
|
||||
import org.apache.lucene.queryparser.flexible.messages.*;
|
||||
import org.apache.lucene.queryparser.flexible.core.*;
|
||||
import org.apache.lucene.queryparser.flexible.core.messages.*;
|
||||
''')
|
||||
|
||||
import org.apache.lucene.queryparser.flexible.messages.*;
|
||||
import org.apache.lucene.queryparser.flexible.core.*;
|
||||
import org.apache.lucene.queryparser.flexible.core.messages.*;
|
||||
''')
|
||||
// Modify constructors and code bits
|
||||
text = text.replaceAll(
|
||||
/(?s)[ ]*public ParseException\(Token currentTokenVal[^}]+[}]/, '''\
|
||||
public ParseException(Token currentTokenVal,
|
||||
int[][] expectedTokenSequencesVal, String[] tokenImageVal)
|
||||
{
|
||||
super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, initialise(
|
||||
currentTokenVal, expectedTokenSequencesVal, tokenImageVal)));
|
||||
this.currentToken = currentTokenVal;
|
||||
this.expectedTokenSequences = expectedTokenSequencesVal;
|
||||
this.tokenImage = tokenImageVal;
|
||||
}
|
||||
''')
|
||||
|
||||
// Modify constructors and code bits
|
||||
text = text.replaceAll(
|
||||
/(?s)[ ]*public ParseException\(Token currentTokenVal[^}]+[}]/, '''\
|
||||
public ParseException(Token currentTokenVal,
|
||||
int[][] expectedTokenSequencesVal, String[] tokenImageVal)
|
||||
{
|
||||
super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, initialise(
|
||||
currentTokenVal, expectedTokenSequencesVal, tokenImageVal)));
|
||||
this.currentToken = currentTokenVal;
|
||||
this.expectedTokenSequences = expectedTokenSequencesVal;
|
||||
this.tokenImage = tokenImageVal;
|
||||
}
|
||||
''')
|
||||
text = text.replaceAll(
|
||||
/(?s)[ ]*public ParseException\(String message\)[^}]+[}]/, '''\
|
||||
public ParseException(Message message)
|
||||
{
|
||||
super(message);
|
||||
}
|
||||
''')
|
||||
|
||||
text = text.replaceAll(
|
||||
/(?s)[ ]*public ParseException\(String message\)[^}]+[}]/, '''\
|
||||
public ParseException(Message message)
|
||||
{
|
||||
super(message);
|
||||
}
|
||||
''')
|
||||
text = text.replaceAll(
|
||||
/(?s)[ ]*public ParseException\(\)[^}]+[}]/, '''\
|
||||
public ParseException()
|
||||
{
|
||||
super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, "Error"));
|
||||
}
|
||||
''')
|
||||
return text
|
||||
})
|
||||
}
|
||||
|
||||
text = text.replaceAll(
|
||||
/(?s)[ ]*public ParseException\(\)[^}]+[}]/, '''\
|
||||
public ParseException()
|
||||
{
|
||||
super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, "Error"));
|
||||
}
|
||||
''')
|
||||
return text
|
||||
})
|
||||
}
|
||||
|
||||
generatedFiles.matching { include "StandardSyntaxParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
// Remove redundant cast
|
||||
text = text.replace(
|
||||
"new java.util.ArrayList<int[]>",
|
||||
"new java.util.ArrayList<>")
|
||||
text = text.replace(
|
||||
"new ArrayList<QueryNode>()",
|
||||
"new ArrayList<>()")
|
||||
text = text.replace(
|
||||
"Collections.<QueryNode> singletonList",
|
||||
"Collections.singletonList")
|
||||
return text
|
||||
})
|
||||
generatedFiles.matching { include "StandardSyntaxParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
// Remove redundant cast
|
||||
text = text.replace(
|
||||
"new java.util.ArrayList<int[]>",
|
||||
"new java.util.ArrayList<>")
|
||||
text = text.replace(
|
||||
"new ArrayList<QueryNode>()",
|
||||
"new ArrayList<>()")
|
||||
text = text.replace(
|
||||
"Collections.<QueryNode> singletonList",
|
||||
"Collections.singletonList")
|
||||
return text
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task regenerate() {
|
||||
description "Regenerate any generated sources"
|
||||
group "generation"
|
||||
task regenerate() {
|
||||
description "Regenerate any generated sources"
|
||||
group "generation"
|
||||
|
||||
// Run regeneration tasks.
|
||||
dependsOn javaccParserClassic, javaccParserSurround, javaccParserFlexible
|
||||
// Run regeneration tasks.
|
||||
dependsOn javaccParserClassic, javaccParserSurround, javaccParserFlexible
|
||||
|
||||
// Clean up and reformat the generated sources after generation.
|
||||
dependsOn "tidy"
|
||||
}
|
||||
// Clean up and reformat the generated sources after generation.
|
||||
dependsOn "tidy"
|
||||
}
|
||||
|
||||
// Make sure tidy runs after generation, if they're defined.
|
||||
tasks.matching { it.name == "tidy" }.configureEach {
|
||||
mustRunAfter javaccParserClassic, javaccParserSurround, javaccParserFlexible
|
||||
}
|
||||
// Make sure tidy runs after generation, if they're defined.
|
||||
tasks.matching { it.name == "tidy" }.configureEach {
|
||||
mustRunAfter javaccParserClassic, javaccParserSurround, javaccParserFlexible
|
||||
}
|
||||
|
||||
task javacc() {
|
||||
description "Regenerate query parsers (javacc syntax definitions)."
|
||||
group "generation"
|
||||
task javacc() {
|
||||
description "Regenerate query parsers (javacc syntax definitions)."
|
||||
group "generation"
|
||||
|
||||
dependsOn javaccParserClassic
|
||||
dependsOn javaccParserSurround
|
||||
dependsOn javaccParserFlexible
|
||||
}
|
||||
}
|
||||
|
||||
configure(project(":solr:core")) {
|
||||
task javacc(type: JavaCCTask) {
|
||||
description "Regenerate Solr query parser"
|
||||
group "generation"
|
||||
|
||||
javaccFile = file('src/java/org/apache/solr/parser/QueryParser.jj')
|
||||
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "QueryParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
text = text.replace(
|
||||
"public QueryParser(CharStream ",
|
||||
"protected QueryParser(CharStream ")
|
||||
text = text.replace(
|
||||
"public QueryParser(QueryParserTokenManager ",
|
||||
"protected QueryParser(QueryParserTokenManager ")
|
||||
text = text.replace(
|
||||
"final private LookaheadSuccess jj_ls =",
|
||||
"static final private LookaheadSuccess jj_ls =")
|
||||
return text
|
||||
})
|
||||
}
|
||||
dependsOn javaccParserClassic
|
||||
dependsOn javaccParserSurround
|
||||
dependsOn javaccParserFlexible
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!skipSolr) {
|
||||
configure(project(":solr:core")) {
|
||||
task javacc(type: JavaCCTask) {
|
||||
description "Regenerate Solr query parser"
|
||||
group "generation"
|
||||
|
||||
javaccFile = file('src/java/org/apache/solr/parser/QueryParser.jj')
|
||||
|
||||
afterGenerate << commonCleanups
|
||||
afterGenerate << { FileTree generatedFiles ->
|
||||
generatedFiles.matching { include "QueryParser.java" }.each { file ->
|
||||
modifyFile(file, { text ->
|
||||
text = text.replace(
|
||||
"public QueryParser(CharStream ",
|
||||
"protected QueryParser(CharStream ")
|
||||
text = text.replace(
|
||||
"public QueryParser(QueryParserTokenManager ",
|
||||
"protected QueryParser(QueryParserTokenManager ")
|
||||
text = text.replace(
|
||||
"final private LookaheadSuccess jj_ls =",
|
||||
"static final private LookaheadSuccess jj_ls =")
|
||||
return text
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// We always regenerate, no need to declare outputs.
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
// Configures JAR manifest entries
|
||||
|
||||
allprojects {
|
||||
subprojects {
|
||||
// Declare these inline for now. Don't know if it makes sense to declare them
|
||||
// per-project.
|
||||
def title;
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Source test classes from Lucene won't be accessible anymore after the split.
|
||||
|
||||
configure(project(":solr:core")) {
|
||||
afterEvaluate {
|
||||
sourceSets {
|
||||
test {
|
||||
java {
|
||||
exclude '**/TestLuceneIndexBackCompat.java'
|
||||
exclude '**/TestXmlQParser.java'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Source test classes from Lucene won't be accessible anymore after the split.
|
||||
|
||||
configure(rootProject) {
|
||||
// Temporarily disable checkUnusedConstraints since we can't live with
|
||||
// lucene dependency in versions.props
|
||||
tasks.matching { it.path == ":checkUnusedConstraints" }.all {
|
||||
enabled = false
|
||||
}
|
||||
|
||||
if (skipLucene || skipSolr) {
|
||||
tasks.matching { it.path == ":verifyLocks" }.all {
|
||||
enabled = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (skipLucene) {
|
||||
configure(project(":solr:solr-ref-guide")) {
|
||||
tasks.matching { it.name == "checkLocalJavadocLinksSite" }.all {
|
||||
enabled = false
|
||||
logger.warn("Skipping checkLocalJavadocLinksSite (tlp git migration)")
|
||||
}
|
||||
}
|
||||
}
|
|
@ -62,8 +62,15 @@ allprojects {
|
|||
|
||||
testsCwd = file("${buildDir}/tmp/tests-cwd")
|
||||
testsTmpDir = file(resolvedTestOption("tests.workDir"))
|
||||
commonDir = project(":lucene").projectDir
|
||||
commonSolrDir = project(":solr").projectDir
|
||||
|
||||
if (!skipLucene) {
|
||||
commonDir = project(":lucene").projectDir
|
||||
}
|
||||
|
||||
// SOLR ONLY
|
||||
if (!skipSolr) {
|
||||
commonSolrDir = project(":solr").projectDir
|
||||
}
|
||||
}
|
||||
|
||||
def verboseMode = resolvedTestOption("tests.verbose").toBoolean()
@@ -113,7 +113,7 @@ configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) {
testOptions += [
[propName: 'tests.luceneMatchVersion', value: baseVersion, description: "Base Lucene version."],
[propName: 'common-solr.dir',
value: { -> file("${commonDir}/../solr").path },
value: project(":solr").projectDir,
description: "Solr base dir.",
includeInReproLine: false
],

@@ -195,7 +195,9 @@ allprojects {
systemProperty 'java.security.policy', file("${resources}/policies/solr-tests.policy")
}

systemProperty 'common.dir', commonDir
if (!skipLucene) {
systemProperty 'common.dir', commonDir
}

def gradleUserHome = project.gradle.getGradleUserHomeDir()
systemProperty 'gradle.lib.dir', Paths.get(project.class.location.toURI()).parent.toAbsolutePath().toString().replace('\\', '/')
@@ -22,7 +22,7 @@
grant {
// 3rd party jar resources (where symlinks are not supported), test-files/ resources
permission java.io.FilePermission "${common.dir}${/}-", "read";
permission java.io.FilePermission "${common.dir}${/}..${/}solr${/}-", "read";
permission java.io.FilePermission "${common-solr.dir}${/}-", "read";

// system jar resources
permission java.io.FilePermission "${java.home}${/}-", "read";

@@ -74,6 +74,11 @@ allprojects {
args += [ "-enableJavadoc" ]
args += [ "-properties", file("${resources}/ecj.javadocs.prefs").absolutePath ]

if (skipLucene) {
// disable for now because individual file excludes cannot be applied to ecj.
enabled = false
}

doFirst {
tmpDst.mkdirs()
@@ -106,10 +111,13 @@ allprojects {
}
}

// SOLR ONLY
if (!skipSolr) {
// This excludes solr-ref-guide from the check (excludes are not taken into account
// and linting of the ant-based task fails.
configure(project(":solr:solr-ref-guide")) {
afterEvaluate {
project.tasks.findByPath("ecjLintMain").enabled = false
configure(project(":solr:solr-ref-guide")) {
afterEvaluate {
project.tasks.findByPath("ecjLintMain").enabled = false
}
}
}
}
@@ -39,11 +39,16 @@ buildscript {
// Configure license checksum folder for top-level projects.
// (The file("licenses") inside the configure scope resolves
// relative to the current project so they're not the same).
configure(project(":lucene")) {
ext.licensesDir = file("licenses")
if (!skipLucene) {
configure(project(":lucene")) {
ext.licensesDir = file("licenses")
}
}
configure(project(":solr")) {
ext.licensesDir = file("licenses")

if (!skipSolr) {
configure(project(":solr")) {
ext.licensesDir = file("licenses")
}
}

// All known license types. If 'noticeOptional' is true then
@@ -273,7 +278,15 @@ subprojects {

// Add top-project level tasks validating dangling files
// and regenerating dependency checksums.
configure([project(":solr"), project(":lucene"),]) {
def plist = []
if (!skipLucene) {
plist.add(project(":lucene"))
}
if (!skipSolr) {
plist.add(project(":solr"))
}

configure(plist) {
def validationTasks = subprojects.collectMany { it.tasks.matching { it.name == "licenses" } }
def jarInfoTasks = subprojects.collectMany { it.tasks.matching { it.name == "collectJarInfos" } }
@@ -362,29 +375,34 @@ configure([project(":solr"), project(":lucene"),]) {
// Exclude files that are not a result of direct dependencies but have to be there.
// It would be probably better to move non-dependency licenses into the actual project
// where they're used and only assemble them for the distribution package.
configure(project(":lucene")) {
checkDanglingLicenseFiles {
exclude += [
"elegant-icon-font-*",
"ant-*",
"ivy-*",
]
if (!skipLucene) {
configure(project(":lucene")) {
checkDanglingLicenseFiles {
exclude += [
"elegant-icon-font-*",
"ant-*",
"ivy-*",
]
}
}
}

configure(project(":solr")) {
checkDanglingLicenseFiles {
exclude += [
"README.committers.txt"
]
// SOLR ONLY
if (!skipSolr) {
configure(project(":solr")) {
checkDanglingLicenseFiles {
exclude += [
"README.committers.txt"
]
}
}
}

// solr-ref-guide doesn't contribute any JARs to dependency checks.
configure(project(":solr:solr-ref-guide")) {
configurations {
jarValidation {
exclude group: "*"
// solr-ref-guide doesn't contribute any JARs to dependency checks.
configure(project(":solr:solr-ref-guide")) {
configurations {
jarValidation {
exclude group: "*"
}
}
}
}
@@ -27,8 +27,11 @@ configure(rootProject) {
dependsOn ":versionsPropsAreSorted"
dependsOn ":checkWorkingCopyClean"

// Solr validation tasks.
dependsOn ":solr:validateConfigFileSanity"
// SOLR ONLY
if (!skipSolr) {
// Solr validation tasks.
dependsOn ":solr:validateConfigFileSanity"
}

// Attach all these tasks from all projects that have them.
// This uses lazy collections as they may not yet be defined.
@@ -49,71 +49,76 @@ configure(rootProject) {
}
}

configure(project(":lucene:analysis:common")) {
rat {
srcExcludes += [
"**/*.aff",
"**/*.dic",
"**/*.wrong",
"**/*.good",
"**/*.sug",
"**/charfilter/*.htm*",
"**/*LuceneResourcesWikiPage.html"
]
if (!skipLucene) {
configure(project(":lucene:analysis:common")) {
rat {
srcExcludes += [
"**/*.aff",
"**/*.dic",
"**/*.wrong",
"**/*.good",
"**/*.sug",
"**/charfilter/*.htm*",
"**/*LuceneResourcesWikiPage.html"
]
}
}

configure(project(":lucene:analysis:kuromoji")) {
rat {
srcExcludes += [
// whether rat detects this as binary or not is platform dependent?!
"**/bocchan.utf-8"
]
}
}

configure(project(":lucene:analysis:opennlp")) {
rat {
excludes += [
"src/tools/test-model-data/*.txt",
]
}
}

configure(project(":lucene:highlighter")) {
rat {
srcExcludes += [
"**/CambridgeMA.utf8"
]
}
}

configure(project(":lucene:suggest")) {
rat {
srcExcludes += [
"**/Top50KWiki.utf8",
"**/stop-snowball.txt"
]
}
}
}

configure(project(":lucene:analysis:kuromoji")) {
rat {
srcExcludes += [
// whether rat detects this as binary or not is platform dependent?!
"**/bocchan.utf-8"
]
// SOLR ONLY
if (!skipSolr) {
configure(project(":solr:core")) {
rat {
srcExcludes += [
"**/htmlStripReaderTest.html"
]
}
}
}

configure(project(":lucene:analysis:opennlp")) {
rat {
excludes += [
"src/tools/test-model-data/*.txt",
]
}
}

configure(project(":lucene:highlighter")) {
rat {
srcExcludes += [
"**/CambridgeMA.utf8"
]
}
}

configure(project(":lucene:suggest")) {
rat {
srcExcludes += [
"**/Top50KWiki.utf8",
"**/stop-snowball.txt"
]
}
}

configure(project(":solr:core")) {
rat {
srcExcludes += [
"**/htmlStripReaderTest.html"
]
}
}

configure(project(":solr:webapp")) {
rat {
includes = [ "**" ]
excludes += [
"web/img/**",
"*.iml",
"build.gradle",
"build/**",
]
configure(project(":solr:webapp")) {
rat {
includes = ["**"]
excludes += [
"web/img/**",
"*.iml",
"build.gradle",
"build/**",
]
}
}
}
@@ -88,17 +88,22 @@ subprojects {
check.dependsOn validateSourcePatterns
}

configure(project(':lucene:benchmark')) {
project.tasks.withType(ValidateSourcePatternsTask) {
sourceFiles.exclude 'temp/**'
sourceFiles.exclude 'work/**'
if (!skipLucene) {
configure(project(':lucene:benchmark')) {
project.tasks.withType(ValidateSourcePatternsTask) {
sourceFiles.exclude 'temp/**'
sourceFiles.exclude 'work/**'
}
}
}

configure(project(':solr:core')) {
project.tasks.withType(ValidateSourcePatternsTask) {
sourceFiles.exclude 'src/**/CheckLoggingConfiguration.java'
sourceFiles.exclude 'src/test/org/apache/hadoop/**'
// SOLR ONLY
if (!skipSolr) {
configure(project(':solr:core')) {
project.tasks.withType(ValidateSourcePatternsTask) {
sourceFiles.exclude 'src/**/CheckLoggingConfiguration.java'
sourceFiles.exclude 'src/test/org/apache/hadoop/**'
}
}
}
@@ -124,6 +129,14 @@ configure(rootProject) {
exclude "${it}/**"
}

if (skipSolr) {
exclude "solr/**"
}

if (skipLucene) {
exclude "lucene/**"
}

// default excludes.
exclude '**/build/**'
exclude 'dev-tools/missing-doclet/src/**/*.java' // <-- TODO: remove once we allow "var" on master

settings.gradle
@@ -19,59 +19,63 @@ rootProject.name = "lucene-solr"

includeBuild("dev-tools/missing-doclet")

include "lucene:analysis:common"
include "lucene:analysis:icu"
include "lucene:analysis:kuromoji"
include "lucene:analysis:morfologik"
include "lucene:analysis:nori"
include "lucene:analysis:opennlp"
include "lucene:analysis:phonetic"
include "lucene:analysis:smartcn"
include "lucene:analysis:stempel"
include "lucene:backward-codecs"
include "lucene:benchmark"
include "lucene:classification"
include "lucene:codecs"
include "lucene:core"
include "lucene:demo"
include "lucene:expressions"
include "lucene:facet"
include "lucene:grouping"
include "lucene:highlighter"
include "lucene:join"
include "lucene:luke"
include "lucene:memory"
include "lucene:misc"
include "lucene:misc:native"
include "lucene:monitor"
include "lucene:queries"
include "lucene:queryparser"
include "lucene:replicator"
include "lucene:sandbox"
include "lucene:spatial-extras"
include "lucene:spatial3d"
include "lucene:suggest"
include "lucene:test-framework"
include "lucene:documentation"
include "lucene:packaging"
if (!(System.getProperty("skip.lucene") as Boolean)) {
include "lucene:analysis:common"
include "lucene:analysis:icu"
include "lucene:analysis:kuromoji"
include "lucene:analysis:morfologik"
include "lucene:analysis:nori"
include "lucene:analysis:opennlp"
include "lucene:analysis:phonetic"
include "lucene:analysis:smartcn"
include "lucene:analysis:stempel"
include "lucene:backward-codecs"
include "lucene:benchmark"
include "lucene:classification"
include "lucene:codecs"
include "lucene:core"
include "lucene:demo"
include "lucene:expressions"
include "lucene:facet"
include "lucene:grouping"
include "lucene:highlighter"
include "lucene:join"
include "lucene:luke"
include "lucene:memory"
include "lucene:misc"
include "lucene:misc:native"
include "lucene:monitor"
include "lucene:queries"
include "lucene:queryparser"
include "lucene:replicator"
include "lucene:sandbox"
include "lucene:spatial-extras"
include "lucene:spatial3d"
include "lucene:suggest"
include "lucene:test-framework"
include "lucene:documentation"
include "lucene:packaging"
}

include "solr:solrj"
include "solr:core"
include "solr:server"
include "solr:contrib:analysis-extras"
include "solr:contrib:analytics"
include "solr:contrib:clustering"
include "solr:contrib:extraction"
include "solr:contrib:langid"
include "solr:contrib:jaegertracer-configurator"
include "solr:contrib:prometheus-exporter"
include "solr:contrib:scripting"
include "solr:contrib:ltr"
include "solr:webapp"
include "solr:test-framework"
include "solr:solr-ref-guide"
include "solr:example"
if (!(System.getProperty("skip.solr") as Boolean)) {
include "solr:solrj"
include "solr:core"
include "solr:server"
include "solr:contrib:analysis-extras"
include "solr:contrib:analytics"
include "solr:contrib:clustering"
include "solr:contrib:extraction"
include "solr:contrib:langid"
include "solr:contrib:jaegertracer-configurator"
include "solr:contrib:prometheus-exporter"
include "solr:contrib:scripting"
include "solr:contrib:ltr"
include "solr:webapp"
include "solr:test-framework"
include "solr:solr-ref-guide"
include "solr:example"

include "solr:documentation"
include "solr:packaging"
include "solr:docker"
include "solr:documentation"
include "solr:packaging"
include "solr:docker"
}
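Note that settings.gradle re-parses the system properties directly because the skipLucene/skipSolr ext properties defined in build.gradle are not visible here. One conceivable cleanup -- a hypothetical helper, not something this commit introduces -- would parse each flag once and include module lists through a closure:

// Include the listed modules only when the corresponding skip flag is absent or false.
def includeUnless = { String skipFlag, List<String> modules ->
  if (!(System.getProperty(skipFlag) as Boolean)) {
    modules.each { include it }
  }
}

includeUnless("skip.lucene", ["lucene:core", "lucene:analysis:common" /* ... */])
includeUnless("skip.solr", ["solr:core", "solr:solrj" /* ... */])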
@@ -23,12 +23,21 @@ description = 'Additional analysis components'
dependencies {
api project(':solr:core')

implementation project(':lucene:analysis:icu')
implementation project(':lucene:analysis:smartcn')
implementation project(':lucene:analysis:morfologik')
implementation project(':lucene:analysis:opennlp')
implementation project(':lucene:analysis:smartcn')
implementation project(':lucene:analysis:stempel')
if (skipLucene) {
implementation "org.apache.lucene:lucene-analysis-icu"
implementation "org.apache.lucene:lucene-analysis-smartcn"
implementation "org.apache.lucene:lucene-analysis-morfologik"
implementation "org.apache.lucene:lucene-analysis-opennlp"
implementation "org.apache.lucene:lucene-analysis-smartcn"
implementation "org.apache.lucene:lucene-analysis-stempel"
} else {
implementation project(':lucene:analysis:icu')
implementation project(':lucene:analysis:smartcn')
implementation project(':lucene:analysis:morfologik')
implementation project(':lucene:analysis:opennlp')
implementation project(':lucene:analysis:smartcn')
implementation project(':lucene:analysis:stempel')
}

testImplementation project(':solr:test-framework')
testImplementation('org.mockito:mockito-core', {
@@ -22,7 +22,13 @@ description = 'Search Results Clustering Integraton'

dependencies {
implementation project(':solr:core')
implementation project(':lucene:analysis:common')

if (skipLucene) {
implementation "org.apache.lucene:lucene-analysis-common"
} else {
implementation project(':lucene:analysis:common')
}

implementation 'org.carrot2:carrot2-core'

testImplementation project(':solr:test-framework')

@@ -21,7 +21,12 @@ description = 'Learning to Rank Package'

dependencies {
implementation project(':solr:core')
implementation project(':lucene:analysis:common')

if (skipLucene) {
implementation "org.apache.lucene:lucene-analysis-common"
} else {
implementation project(':lucene:analysis:common')
}

testImplementation('org.mockito:mockito-core', {
exclude group: "net.bytebuddy", module: "byte-buddy-agent"
@@ -21,24 +21,45 @@ apply plugin: 'java-library'
description = 'Apache Solr Core'

dependencies {
api project(':lucene:core')
api project(':lucene:analysis:common')
api project(':lucene:analysis:kuromoji')
api project(':lucene:analysis:nori')
api project(':lucene:analysis:phonetic')
api project(':lucene:backward-codecs')
api project(':lucene:classification')
api project(':lucene:codecs')
api project(':lucene:expressions')
api project(':lucene:grouping')
api project(':lucene:highlighter')
api project(':lucene:join')
api project(':lucene:misc')
api project(':lucene:queries')
api project(':lucene:queryparser')
api project(':lucene:sandbox')
api project(':lucene:spatial-extras')
api project(':lucene:suggest')
if (skipLucene) {
api "org.apache.lucene:lucene-core"
api "org.apache.lucene:lucene-analysis-common"
api "org.apache.lucene:lucene-analysis-kuromoji"
api "org.apache.lucene:lucene-analysis-nori"
api "org.apache.lucene:lucene-analysis-phonetic"
api "org.apache.lucene:lucene-backward-codecs"
api "org.apache.lucene:lucene-classification"
api "org.apache.lucene:lucene-codecs"
api "org.apache.lucene:lucene-expressions"
api "org.apache.lucene:lucene-grouping"
api "org.apache.lucene:lucene-highlighter"
api "org.apache.lucene:lucene-join"
api "org.apache.lucene:lucene-misc"
api "org.apache.lucene:lucene-queries"
api "org.apache.lucene:lucene-queryparser"
api "org.apache.lucene:lucene-sandbox"
api "org.apache.lucene:lucene-spatial-extras"
api "org.apache.lucene:lucene-suggest"
} else {
api project(':lucene:core')
api project(':lucene:analysis:common')
api project(':lucene:analysis:kuromoji')
api project(':lucene:analysis:nori')
api project(':lucene:analysis:phonetic')
api project(':lucene:backward-codecs')
api project(':lucene:classification')
api project(':lucene:codecs')
api project(':lucene:expressions')
api project(':lucene:grouping')
api project(':lucene:highlighter')
api project(':lucene:join')
api project(':lucene:misc')
api project(':lucene:queries')
api project(':lucene:queryparser')
api project(':lucene:sandbox')
api project(':lucene:spatial-extras')
api project(':lucene:suggest')
}

// Export these dependencies so that they're imported transitively by
// other modules.
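The same project-or-artifact switch recurs for every Lucene module here and in the other Solr build files in this diff. One way to collapse the duplication -- sketched as a hypothetical helper, not part of this commit -- is to map a module path to either the local subproject or the published coordinate (whose version comes from versions.props, see below):

// Resolve a Lucene module to the local subproject or to the published artifact.
def luceneDep = { String path ->
  if (skipLucene) {
    return "org.apache.lucene:lucene-${path.replace(':', '-')}"
  }
  return project(":lucene:$path")
}

dependencies {
  api luceneDep('core')             // project(':lucene:core') or "org.apache.lucene:lucene-core"
  api luceneDep('analysis:common')  // ... or "org.apache.lucene:lucene-analysis-common"
}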
@@ -117,7 +138,12 @@ dependencies {
runtimeOnly ('org.apache.kerby:kerby-pkix')
runtimeOnly ('com.google.protobuf:protobuf-java')

testImplementation project(':lucene:analysis:icu')
if (skipLucene) {
testImplementation "org.apache.lucene:lucene-analysis-icu"
} else {
testImplementation project(':lucene:analysis:icu')
}

testImplementation project(':solr:contrib:analysis-extras')
testImplementation project(':solr:test-framework')
@@ -27,15 +27,15 @@ import java.util.Properties;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.backward_index.TestBackwardsCompatibility;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.util.TestHarness;
import org.junit.Ignore;
import org.junit.Test;

/** Verify we can read/write previous versions' Lucene indexes. */
@Ignore("Missing Lucene back-compat index files")
@LuceneTestCase.AwaitsFix(bugUrl = "SOLR-15224: Missing Lucene back-compat index files")
public class TestLuceneIndexBackCompat extends SolrTestCaseJ4 {
  private static final String[] oldNames = {
    "8.0.0-cfs",
@ -1,72 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.search;
|
||||
|
||||
import java.lang.invoke.MethodHandles;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.analysis.MockTokenFilter;
|
||||
import org.apache.lucene.analysis.MockTokenizer;
|
||||
import org.apache.lucene.queryparser.xml.CoreParser;
|
||||
|
||||
import org.apache.solr.SolrTestCase;
|
||||
import org.apache.solr.util.StartupLoggingUtils;
|
||||
import org.apache.solr.util.TestHarness;
|
||||
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Ignore;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@Ignore("Was relying on Lucene test sources. Should copy?")
|
||||
public class TestXmlQParser extends SolrTestCase /* extends TestCoreParser */ {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
|
||||
private CoreParser solrCoreParser;
|
||||
private static TestHarness harness;
|
||||
|
||||
@BeforeClass
|
||||
public static void init() throws Exception {
|
||||
// we just need to stub this out so we can construct a SolrCoreParser
|
||||
harness = new TestHarness(TestHarness.buildTestNodeConfig(createTempDir()));
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void shutdownLogger() throws Exception {
|
||||
harness.close();
|
||||
harness = null;
|
||||
StartupLoggingUtils.shutdown();
|
||||
}
|
||||
|
||||
// @Override
|
||||
protected CoreParser coreParser() {
|
||||
if (solrCoreParser == null) {
|
||||
solrCoreParser = new SolrCoreParser(
|
||||
"contents",
|
||||
new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET),
|
||||
harness.getRequestFactory("/select", 0, 0).makeRequest());
|
||||
}
|
||||
return solrCoreParser;
|
||||
}
|
||||
|
||||
//public void testSomeOtherQuery() {
|
||||
// Query q = parse("SomeOtherQuery.xml");
|
||||
// dumpResults("SomeOtherQuery", q, ?);
|
||||
//}
|
||||
|
||||
}
|
|
@@ -97,10 +97,12 @@ distributions {
include "README.md"
})

from(project(":lucene").projectDir, {
include "CHANGES.txt"
rename { file -> 'LUCENE_CHANGES.txt' }
})
if (!skipLucene) {
from(project(":lucene").projectDir, {
include "CHANGES.txt"
rename { file -> 'LUCENE_CHANGES.txt' }
})
}

from(configurations.contrib, {
into "contrib"
@@ -132,7 +132,7 @@ ext {
// NOTE: extra '../' because we'll in a sub-dir of buildDir that will be built later...
props: [
htmlSolrJavadocs : 'link:../' + buildDir.toPath().relativize(project(':solr:documentation').docroot.toPath()).toString().replace(File.separator, '/'),
htmlLuceneJavadocs : 'link:../' + buildDir.toPath().relativize(project(':lucene:documentation').docroot.toPath()).toString().replace(File.separator, '/')
htmlLuceneJavadocs : skipLucene ? 'link:../' : 'link:../' + buildDir.toPath().relativize(project(':lucene:documentation').docroot.toPath()).toString().replace(File.separator, '/')
]
]
}
@@ -22,8 +22,14 @@ description = 'Solr Test Framework'
dependencies {
api project(':solr:core')
api project(':solr:solrj')
api project(':lucene:test-framework')
api project(':lucene:analysis:common')

if (skipLucene) {
api "org.apache.lucene:lucene-test-framework"
api "org.apache.lucene:lucene-analysis-common"
} else {
api project(':lucene:test-framework')
api project(':lucene:analysis:common')
}

api 'org.apache.logging.log4j:log4j-core'
api 'io.opentracing:opentracing-mock'
@@ -64,6 +64,7 @@ org.apache.httpcomponents:httpmime=4.5.10
org.apache.james:apache-mime4j*=0.8.3
org.apache.kerby:*=1.0.1
org.apache.logging.log4j:*=2.13.2
org.apache.lucene:*=9.0.0-SNAPSHOT
org.apache.opennlp:opennlp-tools=1.9.1
org.apache.pdfbox:*=2.0.17
org.apache.pdfbox:jempbox=1.8.16
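The new org.apache.lucene:*=9.0.0-SNAPSHOT entry is what gives the version-less external coordinates used throughout the skip.lucene branches a concrete version: versions.props feeds the consistent-versions machinery whose verifyLocks and checkUnusedConstraints tasks are adjusted earlier in this commit. Purely illustrative:

dependencies {
  // No version declared here; with -Dskip.lucene=true this coordinate is pinned to
  // 9.0.0-SNAPSHOT by the org.apache.lucene:*= line in versions.props.
  api "org.apache.lucene:lucene-core"
}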