LUCENE-9180: dos2unix files that don't need dos line endings

Robert Muir 2020-01-27 11:29:59 -05:00
parent a3b0cfcbe2
commit 8e357b167b
17 changed files with 1217 additions and 1217 deletions

.gitattributes

@@ -1,2 +1,2 @@
# Ignore all differences in line endings for the lock file.
versions.lock text eol=lf
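A quick way to confirm how git resolves these attributes for the lock file (an illustrative check, not part of this commit):
git check-attr text eol -- versions.lock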

@@ -1,47 +1,47 @@
// Add "help" tasks which display plain text files under 'help' folder.
configure(rootProject) {
def helpFiles = [
["Workflow", "help/workflow.txt", "Typical workflow commands."],
["Ant", "help/ant.txt", "Ant-gradle migration help."],
["Tests", "help/tests.txt", "Tests, filtering, beasting, etc."],
["Deps", "help/dependencies.txt", "Declaring, inspecting and excluding dependencies."],
["ForbiddenApis", "help/forbiddenApis.txt", "How to add/apply rules for forbidden APIs."],
["LocalSettings", "help/localSettings.txt", "Local settings, overrides and build performance tweaks."],
["Git", "help/git.txt", "Git assistance and guides."],
]
helpFiles.each { section, path, sectionInfo ->
task "help${section}" {
group = 'Help (developer guides and hints)'
description = sectionInfo
doFirst {
println "\n" + rootProject.file(path).getText("UTF-8")
}
}
}
help {
doLast {
println ""
println "This is an experimental Lucene/Solr gradle build. See some"
println "guidelines, ant-equivalent commands etc. under help/*; or type:"
helpFiles.each { section, path, sectionInfo ->
println String.format(Locale.ROOT,
" gradlew :help%-14s # %s", section, sectionInfo)
}
}
}
task allHelpFilesExit() {
doFirst {
helpFiles.each { section, path, sectionInfo ->
if (!rootProject.file(path).exists()) {
throw new GradleException("Help file missing: ${path} (correct help.gradle)")
}
}
}
}
check.dependsOn allHelpFilesExit
}
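With the tasks registered above, individual topics can be printed directly; task names follow the help${section} pattern from the list, for example:
gradlew helpTests
gradlew helpDeps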

@@ -1,44 +1,44 @@
import org.apache.lucene.gradle.ErrorReportingTestListener
// Display all failed tests at the end of the build.
def failedTests = []
allprojects {
tasks.withType(Test) { Test task ->
afterTest { desc, result ->
if (result.resultType == TestResult.ResultType.FAILURE) {
failedTests << [
"name": "${desc.className}.${desc.name}",
"project": "${test.project.path}",
"output": file("${task.testOutputsDir}/${ErrorReportingTestListener.getOutputLogName(desc.parent)}"),
"reproduce": "gradlew ${project.path}:test --tests \"${desc.className}\" ${task.project.testOptionsForReproduceLine}"
]
}
}
afterSuite { desc, result ->
if (result.exceptions) {
failedTests << [
"name": "${desc.name}",
"project": "${test.project.path}",
"output": file("${task.testOutputsDir}/${ErrorReportingTestListener.getOutputLogName(desc)}"),
"reproduce": "gradlew ${project.path}:test --tests \"${desc.name}\" ${task.project.testOptionsForReproduceLine}"
]
}
}
}
}
gradle.buildFinished { result ->
if (failedTests) {
def formatted = failedTests
.sort { a, b -> b.project.compareTo(a.project) }
.collect { e -> String.format(Locale.ROOT,
" - %s (%s)\n Test output: %s\n Reproduce with: %s\n",
e.name, e.project, e.output, e.reproduce) }
.join("\n")
logger.error("\nERROR: The following test(s) have failed:\n${formatted}")
}
}

@@ -1,20 +1,20 @@
// Per-project test summary.
allprojects {
tasks.withType(Test) { task ->
afterSuite { desc, result ->
if (!desc.parent) {
if (result.testCount > 0) {
def components = [
"test(s)" : result.testCount,
"failure(s)": result.failedTestCount,
"skipped" : result.skippedTestCount
].findAll { k, v -> v > 0 }.collect { k, v -> "$v $k" }.join(", ")
logger.lifecycle("${task.path} (${result.resultType}): ${components}")
}
}
}
}
}
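For orientation, a summary line emitted by this hook follows the logger.lifecycle format above and would look roughly like this (hypothetical counts):
:lucene:core:test (SUCCESS): 1500 test(s), 3 skipped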

@@ -1,35 +1,35 @@
// This adds support for compiling and testing against a different Java runtime.
// This is the only way to build against JVMs not yet supported by Gradle itself.
import org.gradle.internal.jvm.Jvm
def jvmForTests = {
def runtimeJavaHome = propertyOrDefault("runtime.java.home", System.getenv('RUNTIME_JAVA_HOME'))
if (!runtimeJavaHome) {
return Jvm.current()
} else {
return Jvm.forHome(file(runtimeJavaHome))
}
}()
def jvmGradle = Jvm.current()
def differentTestJvm = (jvmGradle.javaHome.canonicalPath != jvmForTests.javaHome.canonicalPath)
// Set up tasks to use the alternative Java.
if (differentTestJvm) {
configure(rootProject) {
task testJvmWarning() {
doFirst {
logger.warn("This Java will be used for running tests: ${jvmForTests.javaExecutable}")
}
}
}
// Set up test tasks to use the alternative JVM.
allprojects {
tasks.withType(Test) {
dependsOn ":testJvmWarning"
executable = jvmForTests.javaExecutable
}
}
}
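With this applied, pointing the test tasks at another JDK is just a matter of passing the property (the path below is a placeholder):
gradlew -p lucene/core test -Pruntime.java.home=/path/to/jdk11
or exporting RUNTIME_JAVA_HOME before invoking gradlew.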

@@ -1,31 +1,31 @@
// Add test duration summary at the end of the build.
def allTests = []
allprojects {
tasks.withType(Test) { task ->
afterTest { desc, result ->
def duration = (result.getEndTime() - result.getStartTime())
allTests << [
name : "${desc.className.replaceAll('.+\\.', "")}.${desc.name} (${project.path})",
duration: duration
]
}
}
}
gradle.buildFinished { result ->
if (allTests && result.getFailure() == null) {
def slowest = allTests
.sort { a, b -> b.duration.compareTo(a.duration) }
.take(10)
.findAll { e -> e.duration >= 500 }
.collect { e -> String.format(Locale.ROOT, "%5.2fs %s", e.duration / 1000d, e.name) }
if (slowest) {
logger.lifecycle("The slowest tests (exceeding 500 ms) during this run:\n " +
slowest.join("\n "))
}
}
}
// Add test duration summary at the end of the build.
def allTests = []
allprojects {
tasks.withType(Test) { task ->
afterTest { desc, result ->
def duration = (result.getEndTime() - result.getStartTime())
allTests << [
name : "${desc.className.replaceAll('.+\\.', "")}.${desc.name} (${project.path})",
duration: duration
]
}
}
}
gradle.buildFinished { result ->
if (allTests && result.getFailure() == null) {
def slowest = allTests
.sort { a, b -> b.duration.compareTo(a.duration) }
.take(10)
.findAll { e -> e.duration >= 500 }
.collect { e -> String.format(Locale.ROOT, "%5.2fs %s", e.duration / 1000d, e.name) }
if (slowest) {
logger.lifecycle("The slowest tests (exceeding 500 ms) during this run:\n " +
slowest.join("\n "))
}
}
}

@@ -1,30 +1,30 @@
// This checks that we're running the desired version of Gradle and
// that the JVM is supported.
import org.gradle.util.GradleVersion
configure(rootProject) {
ext {
expectedGradleVersion = '6.0.1'
minJavaVersion = JavaVersion.VERSION_11
}
wrapper {
distributionType = Wrapper.DistributionType.ALL
gradleVersion = expectedGradleVersion
}
def currentJavaVersion = JavaVersion.current()
if (currentJavaVersion < minJavaVersion) {
throw new GradleException("At least Java ${minJavaVersion} is required, you are running Java ${currentJavaVersion} "
+ "[${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]")
}
// If we're regenerating the wrapper, skip the check.
if (!gradle.startParameter.taskNames.contains("wrapper")) {
def currentGradleVersion = GradleVersion.current()
if (currentGradleVersion != GradleVersion.version(expectedGradleVersion)) {
throw new GradleException("Gradle ${expectedGradleVersion} is required (hint: use the gradlew script): this gradle is ${currentGradleVersion}")
}
}
}
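Note that the 'wrapper' task is exempt from the version check above, so after bumping expectedGradleVersion the wrapper scripts can be regenerated simply with:
gradlew wrapper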

@@ -1,116 +1,116 @@
// This configures application of forbidden API rules
// via https://github.com/policeman-tools/forbidden-apis
// Only apply forbidden-apis to java projects.
allprojects { prj ->
plugins.withId("java", {
prj.apply plugin: 'de.thetaphi.forbiddenapis'
// This helper method appends signature files based on a set of true
// dependencies from a given configuration.
def dynamicSignatures = { configuration, suffix ->
def deps = configuration.resolvedConfiguration.resolvedArtifacts
.collect { a -> a.moduleVersion.id }
.collect { id -> [
"${id.group}.${id.name}.all.txt",
"${id.group}.${id.name}.${suffix}.txt",
]}
.flatten()
.sort()
deps += ["defaults.all.txt", "defaults.${suffix}.txt"]
deps.each { sig ->
def signaturesFile = rootProject.file("gradle/validation/forbidden-apis/${sig}")
if (signaturesFile.exists()) {
logger.info("Signature file applied: ${sig}")
signaturesFiles += files(signaturesFile)
} else {
logger.debug("Signature file omitted (does not exist): ${sig}")
}
}
}
// Configure defaults for sourceSets.main
forbiddenApisMain {
bundledSignatures += [
'jdk-unsafe',
'jdk-deprecated',
'jdk-non-portable',
'jdk-reflection',
'jdk-system-out',
]
suppressAnnotations += [
"**.SuppressForbidden"
]
}
// Configure defaults for sourceSets.test
forbiddenApisTest {
bundledSignatures += [
'jdk-unsafe',
'jdk-deprecated',
'jdk-non-portable',
'jdk-reflection',
]
signaturesFiles = files(
rootProject.file("gradle/validation/forbidden-apis/defaults.tests.txt")
)
suppressAnnotations += [
"**.SuppressForbidden"
]
}
// Attach validation to check task.
check.dependsOn forbiddenApisMain, forbiddenApisTest
// Disable sysout signatures for these projects.
if (prj.path in [
":lucene:demo",
":lucene:benchmark",
":lucene:test-framework",
":solr:solr-ref-guide",
":solr:test-framework"
]) {
forbiddenApisMain.bundledSignatures -= [
'jdk-system-out'
]
}
// Configure lucene-specific rules.
if (prj.path.startsWith(":lucene")) {
forbiddenApisMain {
doFirst dynamicSignatures.curry(configurations.compileClasspath, "lucene")
}
forbiddenApisTest {
doFirst dynamicSignatures.curry(configurations.testCompileClasspath, "lucene")
}
}
// Configure solr-specific rules.
if (prj.path.startsWith(":solr")) {
forbiddenApisMain {
doFirst dynamicSignatures.curry(configurations.compileClasspath, "solr")
}
forbiddenApisTest {
doFirst dynamicSignatures.curry(configurations.testCompileClasspath, "solr")
}
}
// We rely on resolved configurations to compute the relevant set of rule
// files for forbiddenApis. Since we don't want to resolve these configurations until
// the task is executed, we can't really use them as task inputs properly. This is a
// chicken-and-egg problem.
//
// This is the simplest workaround possible: just point at all the rule files and indicate
// them as inputs. This way if a rule is modified, checks will be reapplied.
configure([forbiddenApisMain, forbiddenApisTest]) { task ->
task.inputs.dir(rootProject.file("gradle/validation/forbidden-apis"))
}
})
}
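For reference, the signature files under gradle/validation/forbidden-apis/ use the plain-text forbidden-apis format; an illustrative entry (not taken from this commit) could look like:
@defaultMessage Use java.nio.file.Files instead
java.io.File#delete()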

@@ -1,64 +1,64 @@
// This adds top-level 'precommit' task with essential
// precommit validation checks.
import org.eclipse.jgit.api.*;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.errors.*;
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath 'org.eclipse.jgit:org.eclipse.jgit:5.3.0.201903130848-r'
classpath 'commons-codec:commons-codec:1.6'
}
}
def gitStatus(dir) {
try {
def repository = new FileRepositoryBuilder()
.setWorkTree(dir)
.setMustExist(true)
.build()
def status = new Git(repository).status().call()
return status
} catch (RepositoryNotFoundException | NoWorkTreeException e) {
logger.warn("WARNING: Directory is not a valid GIT checkout (won't check dirty files): ${dir}")
return null
} catch (NotSupportedException e) {
throw new GradleException("jgit does not support git repository version at this location: ${dir}", e)
}
}
configure(rootProject) {
// Verify git working copy does not have any unstaged modified files.
task checkWorkingCopyClean() {
doFirst {
def status = gitStatus(rootProject.projectDir)
if (status == null) {
// Ignore the check. This isn't a git checkout.
} else {
def offenders = [
// Exclude staged changes. These are fine in precommit.
// "(added)": status.added,
// "(changed)": status.changed,
// "(removed)": status.removed,
"(conflicting)": status.conflicting,
"(missing)": status.missing,
"(modified)": status.modified,
"(untracked)": [status.untracked, status.untrackedFolders].flatten()
].collectMany { fileStatus, files ->
files.collect {file -> " - ${file} ${fileStatus}" }
}.sort()
if (offenders) {
throw new GradleException("Working copy is not a clean git checkout, offending files:\n${offenders.join("\n")}")
}
}
}
}
}
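The check can also be run on its own (it is wired into the top-level precommit task elsewhere):
gradlew checkWorkingCopyClean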

@@ -1,377 +1,377 @@
// This adds validation of project dependencies:
// 1) license file
// 2) notice file
// 3) checksum validation/ generation.
import org.apache.commons.codec.digest.DigestUtils
import org.apache.commons.codec.digest.MessageDigestAlgorithms
// This should be false only for debugging.
def failOnError = true
// We're using commons-codec for computing checksums.
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath 'commons-codec:commons-codec:1.13'
}
}
// Configure license checksum folder for top-level projects.
// (The file("licenses") inside the configure scope resolves
// relative to the current project so they're not the same).
configure(project(":lucene")) {
ext.licensesDir = file("licenses")
}
configure(project(":solr")) {
ext.licensesDir = file("licenses")
}
// All known license types. If 'noticeOptional' is true then
// the notice file must accompany the license.
def licenseTypes = [
"ASL" : [name: "Apache Software License 2.0"],
"BSD" : [name: "Berkeley Software Distribution"],
//BSD like just means someone has taken the BSD license and put in their name, copyright, or it's a very similar license.
"BSD_LIKE": [name: "BSD like license"],
"CDDL" : [name: "Common Development and Distribution License", noticeOptional: true],
"CPL" : [name: "Common Public License"],
"EPL" : [name: "Eclipse Public License Version 1.0", noticeOptional: true],
"MIT" : [name: "Massachusetts Institute of Tech. License", noticeOptional: true],
"MPL" : [name: "Mozilla Public License", noticeOptional: true /* NOT SURE on the required notice */],
"PD" : [name: "Public Domain", noticeOptional: true],
"SUN" : [name: "Sun Open Source License", noticeOptional: true],
"COMPOUND": [name: "Compound license (details in NOTICE file)."],
]
allprojects {
task licenses() {
group = 'Dependency validation'
description = "Apply all dependency/ license checks."
}
check.dependsOn(licenses)
}
subprojects {
// Configure jarValidation configuration for all projects. Any dependency
// declared on this configuration (or any configuration it extends from) will
// be verified.
configurations {
jarValidation
}
// For Java projects, add all dependencies from the following configurations
// to jar validation
plugins.withType(JavaPlugin) {
configurations {
jarValidation {
extendsFrom runtimeClasspath
extendsFrom compileClasspath
extendsFrom testRuntimeClasspath
extendsFrom testCompileClasspath
}
}
}
// Collects dependency JAR information for a project and saves it in
// project.ext.jarInfos. Each dependency has a map of attributes
// which make it easier to process it later on (name, hash, origin module,
// see the code below for details).
task collectJarInfos() {
dependsOn configurations.jarValidation
doFirst {
def isSolr = project.path.startsWith(":solr")
// When gradle resolves a configuration it applies exclude rules from inherited configurations
// globally (this seems like a bug to me). So we process each inherited configuration independently
// but make sure there are no other dependencies on jarValidation itself.
if (!configurations.jarValidation.dependencies.isEmpty()) {
throw new GradleException("jarValidation must only inherit from other configurations (can't have its own dependencies).")
}
def excludeRules = configurations.jarValidation.excludeRules
ArrayDeque<ResolvedDependency> queue = new ArrayDeque<>()
configurations.jarValidation.extendsFrom.each { conf ->
if (excludeRules) {
conf = configurations.detachedConfiguration().extendsFrom(conf)
conf.excludeRules = excludeRules
}
if (conf.canBeResolved) {
queue.addAll(conf.resolvedConfiguration.firstLevelModuleDependencies)
}
}
def visited = new HashSet<>()
def infos = []
while (!queue.isEmpty()) {
def dep = queue.removeFirst()
// Skip any artifacts from other Solr modules (they will be resolved there).
if (dep.moduleGroup == "org.apache.solr") {
continue
}
// Skip any artifacts from Lucene modules.
if (dep.moduleGroup.startsWith("org.apache.lucene")) {
// ... but process their transitive dependencies for Solr compatibility.
if (isSolr) {
queue.addAll(dep.children)
}
} else {
queue.addAll(dep.children)
dep.moduleArtifacts.each { resolvedArtifact ->
def file = resolvedArtifact.file
if (visited.add(file)) {
infos.add([
name : resolvedArtifact.name,
jarName : file.toPath().getFileName().toString(),
path : file,
module : resolvedArtifact.moduleVersion,
checksum : provider { new DigestUtils(MessageDigestAlgorithms.SHA_1).digestAsHex(file).trim() },
// We keep track of the files referenced by this dependency (sha, license, notice, etc.)
// so that we can determine unused dangling files later on.
referencedFiles: []
])
}
}
}
}
project.ext.jarInfos = infos.sort {a, b -> "${a.module}".compareTo("${b.module}")}
// jarInfos.each { info -> println "${info.module}" }
}
}
// Verifies that each JAR has a corresponding checksum and that it matches actual JAR available for this dependency.
task validateJarChecksums() {
group = 'Dependency validation'
description = "Validate checksums of dependencies"
dependsOn collectJarInfos
doLast {
def errors = []
jarInfos.each { dep ->
def expectedChecksumFile = file("${licensesDir}/${dep.jarName}.sha1")
if (!expectedChecksumFile.exists()) {
errors << "Dependency checksum missing ('${dep.module}'), expected it at: ${expectedChecksumFile}"
} else {
dep.referencedFiles += expectedChecksumFile
def expected = expectedChecksumFile.getText("UTF-8").trim()
def actual = dep.checksum.get()
if (expected.compareToIgnoreCase(actual) != 0) {
errors << "Dependency checksum mismatch ('${dep.module}'), expected it to be: ${expected}, but was: ${actual}"
} else {
logger.log(LogLevel.INFO, "Dependency checksum OK ('${dep.module}')")
}
}
}
if (errors) {
def msg = "Dependency checksum validation failed:\n - " + errors.join("\n - ")
if (failOnError) {
throw new GradleException(msg)
} else {
logger.log(LogLevel.WARN, "WARNING: ${msg}")
}
}
}
}
// Locate the set of license file candidates for this dependency. We
// search for [jar-or-prefix]-LICENSE-[type].txt
// where 'jar-or-prefix' can be any '-'-delimited prefix of the dependency JAR's name.
// So for 'commons-io' it can be 'commons-io-LICENSE-foo.txt' or
// 'commons-LICENSE.txt'
task validateJarLicenses() {
group = 'Dependency validation'
description = "Validate license and notice files of dependencies"
dependsOn collectJarInfos
doLast {
def errors = []
jarInfos.each { dep ->
def baseName = dep.name
def found = []
def candidates = []
while (true) {
candidates += file("${licensesDir}/${baseName}-LICENSE-[type].txt")
found += fileTree(dir: licensesDir, include: "${baseName}-LICENSE-*.txt").files
def prefix = baseName.replaceAll(/[\-][^-]+$/, "")
if (found || prefix == baseName) {
break
}
baseName = prefix
}
if (found.size() == 0) {
errors << "License file missing ('${dep.module}'), expected it at: ${candidates.join(" or ")}," +
" where [type] can be any of ${licenseTypes.keySet()}."
} else if (found.size() > 1) {
errors << "Multiple license files matching for ('${dep.module}'): ${found.join(", ")}"
} else {
def licenseFile = found.get(0)
dep.referencedFiles += licenseFile
def m = (licenseFile.name =~ /LICENSE-(.+)\.txt$/)
if (!m) throw new GradleException("License file name doesn't contain license type?: ${licenseFile.name}")
def licenseName = m[0][1]
def licenseType = licenseTypes[licenseName]
if (!licenseType) {
errors << "Unknown license type suffix for ('${dep.module}'): ${licenseFile} (must be one of ${licenseTypes.keySet()})"
} else {
logger.log(LogLevel.INFO, "Dependency license file OK ('${dep.module}'): " + licenseName)
// Look for sibling NOTICE file.
def noticeFile = file(licenseFile.path.replaceAll(/\-LICENSE-.+/, "-NOTICE.txt"))
if (noticeFile.exists()) {
dep.referencedFiles += noticeFile
logger.log(LogLevel.INFO, "Dependency notice file OK ('${dep.module}'): " + noticeFile)
} else if (!licenseType.noticeOptional) {
errors << "Notice file missing for ('${dep.module}'), expected it at: ${noticeFile}"
}
}
}
}
if (errors) {
def msg = "Certain license/ notice files are missing:\n - " + errors.join("\n - ")
if (failOnError) {
throw new GradleException(msg)
} else {
logger.log(LogLevel.WARN, "WARNING: ${msg}")
}
}
}
}
licenses.dependsOn validateJarChecksums, validateJarLicenses
}
// Add top-project level tasks validating dangling files
// and regenerating dependency checksums.
configure([project(":solr"), project(":lucene"),]) {
def validationTasks = subprojects.collectMany { it.tasks.matching { it.name == "licenses" } }
def jarInfoTasks = subprojects.collectMany { it.tasks.matching { it.name == "collectJarInfos" } }
// Update dependency checksums.
task updateLicenses() {
group = 'Dependency validation'
description = "Write or update checksums of dependencies"
dependsOn jarInfoTasks
doLast {
licensesDir.mkdirs()
// Clean any previous checksums. In theory we wouldn't have to do it --
// dangling files from any previous JARs would be reported;
// it automates the process of updating versions and makes it easier though so
// why not.
project.delete fileTree(licensesDir, {
include "*.sha1"
exclude checkDanglingLicenseFiles.ext.exclude
})
def updated = []
jarInfoTasks.collectMany { task -> task.project.jarInfos }.each { dep ->
def expectedChecksumFile = file("${licensesDir}/${dep.jarName}.sha1")
def actual = dep.checksum.get()
if (expectedChecksumFile.exists()) {
def expected = expectedChecksumFile.getText("UTF-8").trim()
if (expected.compareToIgnoreCase(actual) == 0) {
return;
}
}
updated += "Updated checksum ('${dep.module}'): ${expectedChecksumFile}"
expectedChecksumFile.write(actual + "\n", "UTF-8")
}
updated.sort().each { line -> logger.log(LogLevel.LIFECYCLE, line) }
}
}
// Any validation task must run after all updates have been applied.
// We add an ordering constraint that any validation task (or its dependency subgraph)
// must run after updateLicenses
validationTasks
.collectMany { task -> [task, task.dependsOn]}
.flatten()
.each { task ->
task.mustRunAfter updateLicenses
}
// Check for dangling files in the licenses folder.
task checkDanglingLicenseFiles() {
dependsOn validationTasks
ext {
exclude = []
}
doFirst {
def allReferenced = validationTasks.collectMany { task ->
task.project.jarInfos.collectMany { it.referencedFiles }
}.collect { it.toString() }
def patterns = ext.exclude
def allExisting = fileTree(licensesDir, {
exclude patterns
}).files.collect { it.toString() }
def dangling = (allExisting - allReferenced).sort()
if (dangling) {
gradle.buildFinished {
logger.warn("WARNING: there were unreferenced files under license folder:\n - ${dangling.join("\n - ")}")
}
}
}
}
licenses.dependsOn checkDanglingLicenseFiles
}
// Exclude files that are not a result of direct dependencies but have to be there.
// It would be probably better to move non-dependency licenses into the actual project
// where they're used and only assemble them for the distribution package.
configure(project(":lucene")) {
checkDanglingLicenseFiles {
exclude += [
"elegant-icon-font-*",
"ant-*",
"ivy-*",
]
}
}
configure(project(":solr")) {
checkDanglingLicenseFiles {
exclude += [
"README.committers.txt",
// solr-ref-guide compilation-only dependencies.
"android-json-*",
"ant-*",
"asciidoctor-ant-*",
"jsoup-*",
"junit4-ant-*",
"slf4j-simple-*",
"start.jar.sha1"
]
}
}
// solr-ref-guide doesn't contribute any JARs to dependency checks.
configure(project(":solr:solr-ref-guide")) {
configurations {
jarValidation {
exclude group: "*"
}
}
}
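The two entry points defined above are typically invoked directly, for example:
gradlew licenses          # validate checksums, license and notice files
gradlew updateLicenses    # write or refresh the .sha1 checksum files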

@@ -1,30 +1,30 @@
// This adds top-level 'precommit' task.
configure(rootProject) {
task precommit() {
group = 'Precommit'
description = "All precommit checks"
// Root-level validation tasks.
dependsOn ":verifyLocks"
dependsOn ":versionsPropsAreSorted"
dependsOn ":checkWorkingCopyClean"
dependsOn ":validateSourcePatterns"
// Solr validation tasks.
dependsOn ":solr:validateConfigFileSanity"
// Attach all these tasks from all projects that have them.
// This uses lazy collections as they may not yet be defined.
dependsOn allprojects.collect { prj ->
prj.tasks.matching { task -> task.name in [
"forbiddenApisMain",
"forbiddenApisTest",
"licenses",
"javadoc",
"rat",
]}
}
}
}
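With this applied, all of the checks above are run with a single invocation:
gradlew precommit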

@@ -1,138 +1,138 @@
Dependencies
============
Each gradle project can have multiple (named) "configurations"
and each configuration can have dependencies attached to it.
There are some standard conventions so, for example, the Java plugin
adds standard configurations such as "api", "implementation",
"testImplementation" and others. These configurations can also inherit
from each other; more about this topic can be found here:
https://docs.gradle.org/current/userguide/dependency_management_for_java_projects.html#dependency_management_for_java_projects
https://docs.gradle.org/current/userguide/java_library_plugin.html#sec:java_library_separation
https://docs.gradle.org/current/userguide/java_plugin.html#sec:java_plugin_and_dependency_management
For the needs of Lucene and Solr we will typically focus on three
configurations and attach project dependencies to them:
api - makes a dependency available for main classes, tests and any
other modules importing the project (exportable dependency),
implementation - makes a dependency available for main classes, tests
but will *not* export the dependency for other modules (so their
compilation classpath won't contain it).
testImplementation - makes a dependency only available for test classes.
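As a quick illustration (the coordinates below are hypothetical), a dependencies block exercising all three configurations could look like:
dependencies {
  api "foo.bar:baz-api"                     // exported to modules depending on this project
  implementation "foo.bar:baz"              // internal to this project's main and test code
  testImplementation "foo.bar:baz-testkit"  // visible to this project's tests only
}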
Adding a library dependency
---------------------------
Let's say we wish to add a dependency on library "foo.bar:baz" in
version 1.2 to :lucene:core. Let's assume this library is only
used internally by the project. The :lucene:core project is configured
by lucene/core/build.gradle and we would add (or modify) the dependency
block as follows:
dependencies {
implementation "foo.bar:baz"
}
The "implementation" here is a named configuration; we don't need to declare
it because it is declared for us by the java-library plugin.
In "normal" gradle the version of the dependency would be present
directly inside the declaration but we use a plugin
(palantir-consistent-versions) to manage all dependency versions
from the top-level (so that conflicts can be resolved globally).
If this is the first time "foo.bar:baz" is added to the project, we'd have
to add its version to "versions.props" file at the top level of the
checkout:
foo.bar:baz=1.2
and then regenerate the "versions.lock" file using the following
command:
gradlew --write-locks
IMPORTANT: The versions.lock file will contain the actual version
of the dependency picked based on other project dependencies and
their transitive dependencies. This selected version may be
different from what each of these actually requires (the highest
version number will be typically selected). To see which dependencies
require which version of the library use:
gradlew why --hash=...
where the hash code comes from versions.lock file. For example, at
the time of writing, jackson-databind has the following entry:
com.fasterxml.jackson.core:jackson-databind:2.10.0 (3 constraints: 931a7796)
and "gradlew why --hash=931a7796" prints:
com.fasterxml.jackson.core:jackson-databind:2.10.0
projects -> 2.10.0
net.thisptr:jackson-jq -> 2.7.0
org.carrot2:carrot2-mini -> 2.9.9.3
Once the dependency is added it always makes sense to see the
tree of all module dependencies and maybe exclude transitive
dependencies of foo.bar:baz that we won't need.
Inspecting current dependencies
-------------------------------
The tree of dependencies of a project (in all configurations) can
be dumped by the following command (example):
gradlew -p lucene\analysis\icu dependencies
But this can be a bit overwhelming; we will most likely be interested
in just the "publicly visible" and "classpath-visible" configurations.
The publicly visible project dependencies (classes shared by other
modules importing our module) can be displayed with:
gradlew -p lucene\analysis\icu dependencies --configuration api
And the "private" set of dependencies (real classpath) can be dumped
with:
gradlew -p lucene\analysis\icu dependencies --configuration runtimeClasspath
Excluding a transitive dependency
---------------------------------
Let's say "foo.bar:baz" has a transitive dependency on project
"foo.bar:irrelevant" and we know the transitive dependency is not
crucial for the functioning of "foo.bar:baz". We can exclude it
by adding an exclusion block to the original declaration:
dependencies {
implementation("foo.bar:baz", {
exclude group: "foo.bar", module: "irrelevant"
})
}
Note the brackets - they are important and prevent accidental
mistakes of applying the exclusion to the wrong scope.
Updating dependency checksum and licenses
-----------------------------------------
The last step is to make sure the licenses, notice files and checksums
are in place for any new dependencies. This command will print what's
missing and where:
gradlew licenses
To update JAR checksums for licenses use:
gradlew updateLicenses

@@ -1,14 +1,14 @@
A list of resources that may be helpful for those learning git:
- Dawid's task-oriented help:
https://github.com/dweiss/lucene-git-guides
- git for computer scientists (a nice writeup on how git is essentially a graph
of commits and labels attached to those commits):
https://eagain.net/articles/git-for-computer-scientists/
- git pro (book)
https://git-scm.com/book/en/v2
- git workflows, their pros and cons:
https://help.github.com/articles/what-is-a-good-git-workflow/

@@ -1,128 +1,128 @@
Testing
=======
Examples below assume cwd at the gradlew script in the top directory of
the project's checkout.
Generic test/ checkup commands
------------------------------
Run all unit tests:
gradlew test
Run all verification tasks, including tests:
gradlew check
Run all verification tasks, excluding tests (-x is gradle's generic task
exclusion mechanism):
gradlew check -x test
Run verification for a selected module only:
gradlew :lucene:core:check # By full gradle project path
gradlew -p lucene/core check # By folder designation + task
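These can be combined; for example, to run all verification tasks for a
single module while skipping its tests:
gradlew -p lucene/core check -x test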
Randomization
-------------
To run tests with the given starting seed pass 'tests.seed'
property:
gradlew :lucene:misc:test -Ptests.seed=DEADBEEF
There are a lot of other test randomization properties
available. To list them, their defaults and current values
run the testOpts task against a project that has tests.
For example:
gradlew -p lucene/core testOpts
Filtering
---------
Run tests of lucene-core module:
gradlew -p lucene/core test
Run a single test case (from a single module). Uses gradle's built-in filtering
(https://docs.gradle.org/current/userguide/java_testing.html#test_filtering):
gradlew -p lucene/core test --tests TestDemo
Run all tests in a package:
gradlew -p lucene/core test --tests "org.apache.lucene.document.*"
Run all test classes/methods that match this pattern:
gradlew -p lucene/core test --tests "*testFeatureMissing*"
Test groups
-----------
Tests can be filtered by an annotation they're marked with.
Some test group annotations include: @AwaitsFix, @Nightly, @Slow
This uses filtering infrastructure on the *runner* (randomizedtesting),
not gradle's built-in mechanisms (but it can be combined with "--tests").
For example, run all lucene-core tests annotated as @Slow:
gradlew -p lucene/core test -Ptests.filter=@Slow
Test group filters can be combined into Boolean expressions:
gradlew -p lucene/core test "-Ptests.filter=default and not(@awaitsfix or @slow)"
Reiteration ("beasting")
------------------------
Multiply each test case N times (this works by repeating the same test
within the same JVM; it also works in IDEs):
gradlew -p lucene/core test --tests TestDemo -Ptests.iters=5
Test tasks will be (by default) re-executed on each invocation because
we pick a random global tests.seed. If you run the same tests twice
with the same seed, the test task will be skipped (as it is up-to-date
with respect to source code):
gradlew -p lucene/core test -Ptests.seed=deadbeef
To force re-execution of tests, even for the same master seed, apply
the cleanTest task:
gradlew -p lucene/core cleanTest test -Ptests.seed=deadbeef
Verbose mode and debugging
--------------------------
The "tests.verbose" mode switch enables standard streams from tests
to be dumped directly to the console. Run verbose tests by explicitly
specifying the project and test task, or a fully qualified task path. Example:
gradlew -p lucene/core test -Ptests.verbose=true --tests "TestDemo"
Testing against different JVMs
------------------------------
By default, tests are executed with the same Java that gradle itself
runs on. To run tests against a different Java version, define a
property called "runtime.java.home" or an environment variable
"RUNTIME_JAVA_HOME" pointing at the JDK installation folder.
If the property is used, it can be a system property (-D...) or a
project property (-P...).
Example:
gradlew test -p lucene/test-framework --tests TestJvmInfo -Dtests.verbose=true -Druntime.java.home=/jvms/jdk14
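The same run, using the environment variable instead of the property
(assuming a Unix-like shell):
RUNTIME_JAVA_HOME=/jvms/jdk14 gradlew test -p lucene/test-framework --tests TestJvmInfo -Dtests.verbose=true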
@ -1,27 +1,27 @@
BSD License
Copyright (c) 2000-2006, www.hamcrest.org
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
Neither the name of Hamcrest nor the names of its contributors may be used to endorse
or promote products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
@ -1,27 +1,27 @@
BSD License
Copyright (c) 2000-2006, www.hamcrest.org
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
Neither the name of Hamcrest nor the names of its contributors may be used to endorse
or promote products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
@ -1,92 +1,92 @@
Copyright 2006 The Inconsolata Project Authors
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).
"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.
"Author" refers to any designer, engineer, programmer, technical
writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components,
in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the corresponding
Copyright Holder. This restriction only applies to the primary font name as
presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.
5) The Font Software, modified or unmodified, in part or in whole,
must be distributed entirely under this license, and must not be
distributed under any other license. The requirement for fonts to
remain under this license does not apply to any document created
using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are
not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.