Apply 2-space indent to all gradle scripts (#49071)

Backport of #48849. Update `.editorconfig` to make the Java settings the
default for all files, apply a 2-space indent to all `*.gradle` files, and
then reformat all the affected files.
Rory Hunter authored on 2019-11-14 11:01:23 +00:00; committed by GitHub
parent 7c3198ba44
commit c46a0e8708
191 changed files with 4618 additions and 4614 deletions
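The change itself is mechanical. As a minimal before/after sketch (the `dependencies` block is invented for illustration, not taken from this commit), a Gradle script moves from the old 4-space indent:

    dependencies {
        compile project(':server')
    }

to the new 2-space indent:

    dependencies {
      compile project(':server')
    }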

@@ -2,88 +2,88 @@ import com.bettercloud.vault.VaultConfig;
import com.bettercloud.vault.Vault;
initscript {
repositories {
mavenCentral()
}
dependencies {
classpath 'com.bettercloud:vault-java-driver:4.1.0'
}
repositories {
mavenCentral()
}
dependencies {
classpath 'com.bettercloud:vault-java-driver:4.1.0'
}
}
boolean USE_ARTIFACTORY=false
boolean USE_ARTIFACTORY = false
if (System.getenv('VAULT_ADDR') == null) {
throw new GradleException("You must set the VAULT_ADDR environment variable to use this init script.")
throw new GradleException("You must set the VAULT_ADDR environment variable to use this init script.")
}
if (System.getenv('VAULT_ROLE_ID') == null && System.getenv('VAULT_SECRET_ID') == null && System.getenv('VAULT_TOKEN') == null) {
throw new GradleException("You must set either the VAULT_ROLE_ID and VAULT_SECRET_ID environment variables, " +
"or the VAULT_TOKEN environment variable to use this init script.")
throw new GradleException("You must set either the VAULT_ROLE_ID and VAULT_SECRET_ID environment variables, " +
"or the VAULT_TOKEN environment variable to use this init script.")
}
final String vaultToken = System.getenv('VAULT_TOKEN') ?: new Vault(
new VaultConfig()
.address(System.env.VAULT_ADDR)
.engineVersion(1)
.build()
)
.withRetries(5, 1000)
.auth()
.loginByAppRole("approle", System.env.VAULT_ROLE_ID, System.env.VAULT_SECRET_ID)
.getAuthClientToken();
new VaultConfig()
.address(System.env.VAULT_ADDR)
.engineVersion(1)
.build()
)
.withRetries(5, 1000)
.auth()
.loginByAppRole("approle", System.env.VAULT_ROLE_ID, System.env.VAULT_SECRET_ID)
.getAuthClientToken();
final Vault vault = new Vault(
new VaultConfig()
.address(System.env.VAULT_ADDR)
.engineVersion(1)
.token(vaultToken)
.build()
new VaultConfig()
.address(System.env.VAULT_ADDR)
.engineVersion(1)
.token(vaultToken)
.build()
)
.withRetries(5, 1000)
.withRetries(5, 1000)
if (USE_ARTIFACTORY) {
final Map<String,String> artifactoryCredentials = vault.logical()
.read("secret/elasticsearch-ci/artifactory.elstc.co")
.getData();
logger.info("Using elastic artifactory repos")
Closure configCache = {
return {
name "artifactory-gradle-release"
url "https://artifactory.elstc.co/artifactory/gradle-release"
credentials {
username artifactoryCredentials.get("username")
password artifactoryCredentials.get("token")
}
}
final Map<String, String> artifactoryCredentials = vault.logical()
.read("secret/elasticsearch-ci/artifactory.elstc.co")
.getData();
logger.info("Using elastic artifactory repos")
Closure configCache = {
return {
name "artifactory-gradle-release"
url "https://artifactory.elstc.co/artifactory/gradle-release"
credentials {
username artifactoryCredentials.get("username")
password artifactoryCredentials.get("token")
}
}
settingsEvaluated { settings ->
settings.pluginManagement {
repositories {
maven configCache()
}
}
}
settingsEvaluated { settings ->
settings.pluginManagement {
repositories {
maven configCache()
}
}
projectsLoaded {
allprojects {
buildscript {
repositories {
maven configCache()
}
}
repositories {
maven configCache()
}
}
projectsLoaded {
allprojects {
buildscript {
repositories {
maven configCache()
}
}
repositories {
maven configCache()
}
}
}
}
projectsLoaded {
rootProject {
project.pluginManager.withPlugin('com.gradle.build-scan') {
buildScan.server = 'https://gradle-enterprise.elastic.co'
}
rootProject {
project.pluginManager.withPlugin('com.gradle.build-scan') {
buildScan.server = 'https://gradle-enterprise.elastic.co'
}
}
}
@@ -91,24 +91,24 @@ final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.u
final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false'))
if (buildCacheUrl) {
final Map<String,String> buildCacheCredentials = vault.logical()
.read("secret/elasticsearch-ci/gradle-build-cache")
.getData();
gradle.settingsEvaluated { settings ->
settings.buildCache {
local {
// Disable the local build cache in CI since we use ephemeral workers and it incurs an IO penalty
enabled = false
}
remote(HttpBuildCache) {
url = buildCacheUrl
push = buildCachePush
credentials {
username = buildCacheCredentials.get("username")
password = buildCacheCredentials.get("password")
}
}
final Map<String, String> buildCacheCredentials = vault.logical()
.read("secret/elasticsearch-ci/gradle-build-cache")
.getData();
gradle.settingsEvaluated { settings ->
settings.buildCache {
local {
// Disable the local build cache in CI since we use ephemeral workers and it incurs an IO penalty
enabled = false
}
remote(HttpBuildCache) {
url = buildCacheUrl
push = buildCachePush
credentials {
username = buildCacheCredentials.get("username")
password = buildCacheCredentials.get("password")
}
}
}
}
}
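For context, the file above is a Gradle init script: it takes effect via Gradle's `--init-script` flag rather than being applied by a project. A hypothetical CI invocation (script path, token, and URLs are placeholders, not taken from this commit) might look like:

    VAULT_ADDR=https://vault.example.com VAULT_TOKEN=... \
      ./gradlew --init-script .ci/init.gradle \
      -Dorg.elasticsearch.build.cache.url=https://gradle-cache.example.com/ \
      -Dorg.elasticsearch.build.cache.push=true \
      build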

@@ -2,12 +2,15 @@
root = true
[*.java]
[*]
charset = utf-8
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space
indent_size = 4
[*.bat]
indent_size = 2
[*.gradle]
indent_size = 2
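Assembled from the lines above, the resulting `.editorconfig` reads roughly as follows (a reconstruction for readability; the ordering of the shared keys is inferred from the hunk):

    root = true

    [*]
    charset = utf-8
    trim_trailing_whitespace = true
    insert_final_newline = true
    indent_style = space
    indent_size = 4

    [*.bat]
    indent_size = 2

    [*.gradle]
    indent_size = 2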

@@ -29,16 +29,16 @@ archivesBaseName = 'elasticsearch-benchmarks'
test.enabled = false
dependencies {
compile(project(":server")) {
// JMH ships with the conflicting version 4.6. This prevents us from using jopt-simple in benchmarks (which should be ok) but allows
// us to invoke the JMH uberjar as usual.
exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
}
compile "org.openjdk.jmh:jmh-core:$versions.jmh"
annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
// Dependencies of JMH
runtime 'net.sf.jopt-simple:jopt-simple:4.6'
runtime 'org.apache.commons:commons-math3:3.2'
compile(project(":server")) {
// JMH ships with the conflicting version 4.6. This prevents us from using jopt-simple in benchmarks (which should be ok) but allows
// us to invoke the JMH uberjar as usual.
exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
}
compile "org.openjdk.jmh:jmh-core:$versions.jmh"
annotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
// Dependencies of JMH
runtime 'net.sf.jopt-simple:jopt-simple:4.6'
runtime 'org.apache.commons:commons-math3:3.2'
}
compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked,-processing"
@@ -55,13 +55,13 @@ forbiddenApisMain.enabled = false
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
thirdPartyAudit.ignoreViolations (
// these classes intentionally use JDK internal API (and this is ok since the project is maintained by Oracle employees)
'org.openjdk.jmh.profile.AbstractHotspotProfiler',
'org.openjdk.jmh.profile.HotspotThreadProfiler',
'org.openjdk.jmh.profile.HotspotClassloadingProfiler',
'org.openjdk.jmh.profile.HotspotCompilationProfiler',
'org.openjdk.jmh.profile.HotspotMemoryProfiler',
'org.openjdk.jmh.profile.HotspotRuntimeProfiler',
'org.openjdk.jmh.util.Utils'
thirdPartyAudit.ignoreViolations(
// these classes intentionally use JDK internal API (and this is ok since the project is maintained by Oracle employees)
'org.openjdk.jmh.profile.AbstractHotspotProfiler',
'org.openjdk.jmh.profile.HotspotThreadProfiler',
'org.openjdk.jmh.profile.HotspotClassloadingProfiler',
'org.openjdk.jmh.profile.HotspotCompilationProfiler',
'org.openjdk.jmh.profile.HotspotMemoryProfiler',
'org.openjdk.jmh.profile.HotspotRuntimeProfiler',
'org.openjdk.jmh.util.Utils'
)

@@ -32,10 +32,10 @@ import org.gradle.util.GradleVersion
import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure
plugins {
id 'com.gradle.build-scan' version '2.4.2'
id 'lifecycle-base'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
id 'com.gradle.build-scan' version '2.4.2'
id 'lifecycle-base'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
}
apply plugin: 'nebula.info-scm'
@@ -139,14 +139,14 @@ subprojects {
BwcVersions versions = new BwcVersions(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8'))
task updateCIBwcVersions() {
doLast {
File yml = file(".ci/bwcVersions")
yml.text = ""
yml << "BWC_VERSION:\n"
versions.indexCompatible.each {
yml << " - \"$it\"\n"
}
doLast {
File yml = file(".ci/bwcVersions")
yml.text = ""
yml << "BWC_VERSION:\n"
versions.indexCompatible.each {
yml << " - \"$it\"\n"
}
}
}
// build metadata from previous build, contains eg hashes for bwc builds
@@ -164,9 +164,9 @@ allprojects {
project.ext {
// for ide hacks...
isEclipse = System.getProperty("eclipse.launcher") != null || // Detects gradle launched from Eclipse's IDE
System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
gradle.startParameter.taskNames.contains('cleanEclipse')
System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
gradle.startParameter.taskNames.contains('cleanEclipse')
isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea')
// for BWC testing
@@ -181,22 +181,22 @@ task verifyVersions {
if (gradle.startParameter.isOffline()) {
throw new GradleException("Must run in online mode to verify versions")
}
// Read the list from maven central.
// Fetch the metadata and parse the xml into Version instances because it's more straightforward here
// Read the list from maven central.
// Fetch the metadata and parse the xml into Version instances because it's more straightforward here
// rather than bwcVersion ( VersionCollection ).
new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
bwcVersions.compareToAuthoritative(
new XmlParser().parse(s)
.versioning.versions.version
.collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }
.collect { Version.fromString(it) }
new XmlParser().parse(s)
.versioning.versions.version
.collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }
.collect { Version.fromString(it) }
)
}
String ciYml = file(".ci/bwcVersions").text
bwcVersions.indexCompatible.each {
if (ciYml.contains("\"$it\"\n") == false) {
throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
}
if (ciYml.contains("\"$it\"\n") == false) {
throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
}
}
}
}
@@ -250,7 +250,7 @@ allprojects {
// the "value" -quiet is added, separated by a space. This is ok since the javadoc
// command already adds -quiet, so we are just duplicating it
// see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
javadoc.options.encoding='UTF8'
javadoc.options.encoding = 'UTF8'
javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
}
@@ -291,19 +291,19 @@ allprojects {
}
boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin)
project.configurations.compile.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(hasShadow, c) })
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(hasShadow, c) })
project.configurations.compileOnly.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
if (hasShadow) {
// include any dependencies for shadow JAR projects that are *not* bundled in the shadow JAR
project.configurations.shadow.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
}
}
}
@@ -363,7 +363,7 @@ allprojects {
}
tasks.named('cleanIdea') {
delete 'build-idea'
delete 'build-idea'
}
}
@@ -402,9 +402,9 @@ allprojects {
prefix = prefix.replace(':', '_')
}
if (eclipse.project.name.startsWith(prefix)) {
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/elastic-license-header.txt')
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/elastic-license-header.txt')
} else {
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/oss-license-header.txt')
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/oss-license-header.txt')
}
String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n'
@@ -414,7 +414,7 @@ allprojects {
// TODO: "package this up" for external builds
from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings')
into '.settings'
filter{ it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader)}
filter { it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader) }
}
// otherwise .settings is not nuked entirely
tasks.register('wipeEclipseSettings', Delete) {
@@ -431,13 +431,14 @@ class Run extends DefaultTask {
boolean debug = false
@Option(
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
public void setDebug(boolean enabled) {
project.project(':distribution').run.debug = enabled
}
}
task run(type: Run) {
dependsOn ':distribution:run'
description = 'Runs elasticsearch in the foreground'
@@ -446,20 +447,20 @@ task run(type: Run) {
}
wrapper {
distributionType = 'ALL'
doLast {
final DistributionLocator locator = new DistributionLocator()
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
final String sha256Sum = new String(sha256Uri.toURL().bytes)
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
println "Added checksum to wrapper properties"
// Update build-tools to reflect the Gradle upgrade
// TODO: we can remove this once we have tests to make sure older versions work.
project(':build-tools').file('src/main/resources/minimumGradleVersion').text = gradleVersion
println "Updated minimum Gradle Version"
}
distributionType = 'ALL'
doLast {
final DistributionLocator locator = new DistributionLocator()
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
final String sha256Sum = new String(sha256Uri.toURL().bytes)
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
println "Added checksum to wrapper properties"
// Update build-tools to reflect the Gradle upgrade
// TODO: we can remove this once we have tests to make sure older versions work.
project(':build-tools').file('src/main/resources/minimumGradleVersion').text = gradleVersion
println "Updated minimum Gradle Version"
}
}
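// Illustration, not part of the commit: the doLast block above appends the
// checksum line to gradle/wrapper/gradle-wrapper.properties, so after running
// the wrapper task that file gains an entry of the form:
//   distributionSha256Sum=<64-hex-char SHA-256 of the downloaded distribution>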
gradle.projectsEvaluated {
@@ -485,10 +486,10 @@ gradle.projectsEvaluated {
String coords = "${p.group}:${p.name}"
if (false == coordsToProject.putIfAbsent(coords, p)) {
throw new GradleException(
"Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " +
"have the same name and group: ${coords}. " +
"This doesn't currently work correctly in Gradle, see: " +
"https://github.com/gradle/gradle/issues/847"
"Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " +
"have the same name and group: ${coords}. " +
"This doesn't currently work correctly in Gradle, see: " +
"https://github.com/gradle/gradle/issues/847"
)
}
}
@@ -496,10 +497,10 @@ gradle.projectsEvaluated {
allprojects {
tasks.register('resolveAllDependencies') {
dependsOn tasks.matching { it.name == "pullFixture"}
doLast {
configurations.findAll { it.isCanBeResolved() }.each { it.resolve() }
}
dependsOn tasks.matching { it.name == "pullFixture" }
doLast {
configurations.findAll { it.isCanBeResolved() }.each { it.resolve() }
}
}
// helper task to print direct dependencies of a single task
@@ -528,10 +529,10 @@ allprojects {
def checkPart1 = tasks.register('checkPart1')
def checkPart2 = tasks.register('checkPart2')
plugins.withId('lifecycle-base') {
if (project.path.startsWith(":x-pack:")) {
checkPart2.configure { dependsOn 'check' }
} else {
checkPart1.configure { dependsOn 'check' }
}
if (project.path.startsWith(":x-pack:")) {
checkPart2.configure { dependsOn 'check' }
} else {
checkPart1.configure { dependsOn 'check' }
}
}
}

@@ -28,7 +28,7 @@ group = 'org.elasticsearch.gradle'
String minimumGradleVersion = file('src/main/resources/minimumGradleVersion').text.trim()
if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion)) {
throw new GradleException("Gradle ${minimumGradleVersion}+ is required to build elasticsearch")
throw new GradleException("Gradle ${minimumGradleVersion}+ is required to build elasticsearch")
}
if (project == rootProject) {
@@ -66,8 +66,8 @@ if (JavaVersion.current() < JavaVersion.VERSION_11) {
}
sourceSets {
// We have a few classes that need to be compiled for older java versions
minimumRuntime { }
// We have a few classes that need to be compiled for older java versions
minimumRuntime {}
}
configurations {
@@ -75,16 +75,16 @@ configurations {
}
compileMinimumRuntimeJava {
targetCompatibility = 8
sourceCompatibility = 8
targetCompatibility = 8
sourceCompatibility = 8
}
jar {
from sourceSets.minimumRuntime.output
from sourceSets.minimumRuntime.output
}
javadoc {
source sourceSets.minimumRuntime.allSource
source sourceSets.minimumRuntime.allSource
}
/*****************************************************************************
@@ -102,7 +102,7 @@ dependencies {
}
compile localGroovy()
compile 'commons-codec:commons-codec:1.12'
compile 'org.apache.commons:commons-compress:1.19'
@@ -186,13 +186,13 @@ if (project != rootProject) {
distribution project(':distribution:archives:linux-tar')
distribution project(':distribution:archives:oss-linux-tar')
}
// for external projects we want to remove the marker file indicating we are running the Elasticsearch project
processResources {
exclude 'buildSrc.marker'
into('META-INF') {
from configurations.reaper
}
exclude 'buildSrc.marker'
into('META-INF') {
from configurations.reaper
}
}
// TODO: re-enable once randomizedtesting gradle code is published and removed from here
@@ -255,8 +255,8 @@ class VersionPropertiesLoader {
}
if (elasticsearch.matches("[0-9]+\\.[0-9]+\\.[0-9]+") == false) {
throw new IllegalStateException(
"Expected elasticsearch version to be numbers only of the form X.Y.Z but it was: " +
elasticsearch
"Expected elasticsearch version to be numbers only of the form X.Y.Z but it was: " +
elasticsearch
)
}
String qualifier = systemProperties.getProperty("build.version_qualifier", "");

@@ -1,8 +1,8 @@
apply plugin: 'java'
jar {
archiveName = "${project.name}.jar"
manifest {
attributes 'Main-Class': 'org.elasticsearch.gradle.reaper.Reaper'
}
archiveName = "${project.name}.jar"
manifest {
attributes 'Main-Class': 'org.elasticsearch.gradle.reaper.Reaper'
}
}

@@ -22,34 +22,34 @@ import org.elasticsearch.gradle.info.BuildParams
*/
plugins {
id 'elasticsearch.global-build-info'
id 'elasticsearch.global-build-info'
}
boolean internal = Boolean.parseBoolean(System.getProperty("tests.internal", "true"))
BuildParams.init { it.setIsInternal(internal) }
project.gradle.projectsEvaluated {
// wire the download service url to wiremock
String fakeDownloadService = System.getProperty('tests.download_service')
if (fakeDownloadService != null) {
IvyArtifactRepository repository = (IvyArtifactRepository) rootProject.repositories.getByName("elasticsearch-downloads")
repository.setUrl(fakeDownloadService)
repository = (IvyArtifactRepository) project('subproj').repositories.getByName("elasticsearch-downloads")
repository.setUrl(fakeDownloadService)
if (internal == false) {
repository = (IvyArtifactRepository) rootProject.repositories.getByName("elasticsearch-snapshots")
repository.setUrl(fakeDownloadService)
repository = (IvyArtifactRepository) project('subproj').repositories.getByName("elasticsearch-snapshots")
repository.setUrl(fakeDownloadService)
}
// wire the download service url to wiremock
String fakeDownloadService = System.getProperty('tests.download_service')
if (fakeDownloadService != null) {
IvyArtifactRepository repository = (IvyArtifactRepository) rootProject.repositories.getByName("elasticsearch-downloads")
repository.setUrl(fakeDownloadService)
repository = (IvyArtifactRepository) project('subproj').repositories.getByName("elasticsearch-downloads")
repository.setUrl(fakeDownloadService)
if (internal == false) {
repository = (IvyArtifactRepository) rootProject.repositories.getByName("elasticsearch-snapshots")
repository.setUrl(fakeDownloadService)
repository = (IvyArtifactRepository) project('subproj').repositories.getByName("elasticsearch-snapshots")
repository.setUrl(fakeDownloadService)
}
}
}
if (internal) {
Version currentVersion = Version.fromString("9.0.0")
BwcVersions versions = new BwcVersions(new TreeSet<>(
Arrays.asList(Version.fromString("8.0.0"), Version.fromString("8.0.1"), Version.fromString("8.1.0"), currentVersion)),
currentVersion)
allprojects {
ext.bwcVersions = versions
}
Version currentVersion = Version.fromString("9.0.0")
BwcVersions versions = new BwcVersions(new TreeSet<>(
Arrays.asList(Version.fromString("8.0.0"), Version.fromString("8.0.1"), Version.fromString("8.1.0"), currentVersion)),
currentVersion)
allprojects {
ext.bwcVersions = versions
}
}

@@ -1,4 +1,3 @@
String distroConfig = System.getProperty('tests.local_distro.config')
if (distroConfig != null) {
// setup the test distribution as an artifact of this project

@@ -1,5 +1,5 @@
plugins {
id 'elasticsearch.distribution-download'
id 'elasticsearch.distribution-download'
}
String distroVersion = System.getProperty('tests.distro.version')

@@ -1,38 +1,38 @@
plugins {
id 'elasticsearch.build'
id 'elasticsearch.build'
}
ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")
buildResources {
copy 'checkstyle.xml'
copy 'checkstyle.xml'
}
task sampleCopyAll(type: Sync) {
/** Note: no explicit dependency. This works with tasks that use the Provider API a.k.a "Lazy Configuration" **/
from buildResources
into "$buildDir/sampleCopyAll"
/** Note: no explicit dependency. This works with tasks that use the Provider API a.k.a "Lazy Configuration" **/
from buildResources
into "$buildDir/sampleCopyAll"
}
task sample {
// This does not work, task dependencies can't be providers
// dependsOn buildResources.resource('minimumRuntimeVersion')
// Nor does this, despite https://github.com/gradle/gradle/issues/3811
// dependsOn buildResources.outputDir
// for now it's just
dependsOn buildResources
// we have to reference it at configuration time in order to be picked up
ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
doLast {
println "This task is using ${file(checkstyle_suppressions)}"
}
// This does not work, task dependencies can't be providers
// dependsOn buildResources.resource('minimumRuntimeVersion')
// Nor does this, despite https://github.com/gradle/gradle/issues/3811
// dependsOn buildResources.outputDir
// for now it's just
dependsOn buildResources
// we have to reference it at configuration time in order to be picked up
ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
doLast {
println "This task is using ${file(checkstyle_suppressions)}"
}
}
task noConfigAfterExecution {
dependsOn buildResources
doLast {
println "This should cause an error because we are refferencing " +
"${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
}
dependsOn buildResources
doLast {
println "This should cause an error because we are refferencing " +
"${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
}
}

@@ -1,19 +1,19 @@
plugins {
id 'java'
id 'elasticsearch.build'
id 'java'
id 'elasticsearch.build'
}
ext.licenseFile = file("LICENSE")
ext.noticeFile = file("NOTICE")
dependencies {
compile "junit:junit:${versions.junit}"
// missing classes in thirdparty audit
compile 'org.hamcrest:hamcrest-core:1.3'
compile "junit:junit:${versions.junit}"
// missing classes in thirdparty audit
compile 'org.hamcrest:hamcrest-core:1.3'
}
repositories {
jcenter()
jcenter()
}
// todo remove offending rules
@@ -28,7 +28,7 @@ thirdPartyAudit.enabled = false
loggerUsageCheck.enabled = false
task hello {
doFirst {
println "build plugin can be applied"
}
doFirst {
println "build plugin can be applied"
}
}

@@ -1,17 +1,16 @@
project.gradle.projectsEvaluated {
// wire the jdk repo to wiremock
String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo'))
String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
println rootProject.repositories.asMap.keySet()
IvyArtifactRepository repository =
(IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVendor}_${fakeJdkVersion}")
repository.setUrl(fakeJdkRepo)
// wire the jdk repo to wiremock
String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo'))
String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
println rootProject.repositories.asMap.keySet()
IvyArtifactRepository repository =
(IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVendor}_${fakeJdkVersion}")
repository.setUrl(fakeJdkRepo)
}
task numConfigurations {
doLast {
println "NUM CONFIGS: ${project.configurations.size()}"
}
}
doLast {
println "NUM CONFIGS: ${project.configurations.size()}"
}
}

@@ -3,9 +3,9 @@ evaluationDependsOn ':subproj'
String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
jdks {
linux_jdk {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "linux"
}
}
linux_jdk {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "linux"
}
}

@@ -1,45 +1,45 @@
plugins {
id 'elasticsearch.jdk-download'
id 'elasticsearch.jdk-download'
}
String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor'))
String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version'))
jdks {
linux {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "linux"
}
darwin {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "darwin"
}
windows {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "windows"
}
linux {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "linux"
}
darwin {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "darwin"
}
windows {
vendor = fakeJdkVendor
version = fakeJdkVersion
platform = "windows"
}
}
task getLinuxJdk {
dependsOn jdks.linux
doLast {
println "JDK HOME: " + jdks.linux
}
dependsOn jdks.linux
doLast {
println "JDK HOME: " + jdks.linux
}
}
task getDarwinJdk {
dependsOn jdks.darwin
doLast {
println "JDK HOME: " + jdks.darwin
}
dependsOn jdks.darwin
doLast {
println "JDK HOME: " + jdks.darwin
}
}
task getWindowsJdk {
dependsOn jdks.windows
doLast {
println "JDK HOME: " + jdks.windows
}
}
dependsOn jdks.windows
doLast {
println "JDK HOME: " + jdks.windows
}
}

@@ -1,11 +1,11 @@
plugins {
id 'elasticsearch.reaper'
id 'elasticsearch.reaper'
}
task launchReaper {
doLast {
def reaper = project.extensions.getByName('reaper')
reaper.registerCommand('test', 'true')
reaper.unregister('test')
}
}
doLast {
def reaper = project.extensions.getByName('reaper')
reaper.registerCommand('test', 'true')
reaper.unregister('test')
}
}

@@ -1,53 +1,53 @@
import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar
plugins {
id 'base'
id 'distribution'
id 'elasticsearch.symbolic-link-preserving-tar'
id 'base'
id 'distribution'
id 'elasticsearch.symbolic-link-preserving-tar'
}
final String source = Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_source'))
boolean preserveFileTimestamps;
final String testPreserveFileTimestamps =
Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_preserve_file_timestamps'))
final String testPreserveFileTimestamps =
Objects.requireNonNull(System.getProperty('tests.symbolic_link_preserving_tar_preserve_file_timestamps'))
switch (testPreserveFileTimestamps) {
case "true":
preserveFileTimestamps = true
break
case "false":
preserveFileTimestamps = false
break
default:
throw new IllegalArgumentException(
"tests.symbolic_link_preserving_tar_preserve_file_timestamps must be [true] or [false] but was ["
+ testPreserveFileTimestamps + "]")
case "true":
preserveFileTimestamps = true
break
case "false":
preserveFileTimestamps = false
break
default:
throw new IllegalArgumentException(
"tests.symbolic_link_preserving_tar_preserve_file_timestamps must be [true] or [false] but was ["
+ testPreserveFileTimestamps + "]")
}
task buildBZip2Tar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
tar.archiveExtension = 'tar.bz2'
tar.compression = Compression.BZIP2
tar.preserveFileTimestamps = preserveFileTimestamps
from fileTree(source)
doLast {
println archiveFile.get().asFile.path
}
tar.archiveExtension = 'tar.bz2'
tar.compression = Compression.BZIP2
tar.preserveFileTimestamps = preserveFileTimestamps
from fileTree(source)
doLast {
println archiveFile.get().asFile.path
}
}
task buildGZipTar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
tar.archiveExtension = 'tar.gz'
tar.compression = Compression.GZIP
tar.preserveFileTimestamps = preserveFileTimestamps
from fileTree(source)
doLast{
println archiveFile.get().asFile.path
}
tar.archiveExtension = 'tar.gz'
tar.compression = Compression.GZIP
tar.preserveFileTimestamps = preserveFileTimestamps
from fileTree(source)
doLast {
println archiveFile.get().asFile.path
}
}
task buildTar(type: SymbolicLinkPreservingTar) { SymbolicLinkPreservingTar tar ->
tar.archiveExtension = 'tar'
tar.preserveFileTimestamps = preserveFileTimestamps
from fileTree(source)
doLast{
println archiveFile.get().asFile.path
}
tar.archiveExtension = 'tar'
tar.preserveFileTimestamps = preserveFileTimestamps
from fileTree(source)
doLast {
println archiveFile.get().asFile.path
}
}
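A sketch of invoking one of the tasks above from the command line, supplying the system properties the script requires (the source path is a placeholder):

    ./gradlew buildTar \
      -Dtests.symbolic_link_preserving_tar_source=/path/to/source \
      -Dtests.symbolic_link_preserving_tar_preserve_file_timestamps=true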

@@ -1,83 +1,83 @@
plugins {
id 'elasticsearch.build' apply false
id 'elasticsearch.build' apply false
}
allprojects {
apply plugin: 'java'
apply plugin: 'elasticsearch.build'
apply plugin: 'java'
apply plugin: 'elasticsearch.build'
repositories {
jcenter()
}
dependencies {
testCompile "junit:junit:4.12"
}
repositories {
jcenter()
}
dependencies {
testCompile "junit:junit:4.12"
}
ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")
ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")
testingConventions.naming {
// Reset default to no baseClass checks
Tests {
baseClasses = []
}
IT {
baseClasses = []
}
testingConventions.naming {
// Reset default to no baseClass checks
Tests {
baseClasses = []
}
IT {
baseClasses = []
}
}
}
project(':empty_test_task') {
task emptyTest(type: Test) {
task emptyTest(type: Test) {
}
}
}
project(':all_classes_in_tasks') {
test {
include "**/Convention*"
}
test {
include "**/Convention*"
}
}
project(':not_implementing_base') {
testingConventions.naming {
Tests {
baseClass 'org.elasticsearch.gradle.testkit.Unit'
}
IT {
baseClass 'org.elasticsearch.gradle.testkit.Integration'
}
testingConventions.naming {
Tests {
baseClass 'org.elasticsearch.gradle.testkit.Unit'
}
test {
include "**/*IT.class"
include "**/*Tests.class"
IT {
baseClass 'org.elasticsearch.gradle.testkit.Integration'
}
}
test {
include "**/*IT.class"
include "**/*Tests.class"
}
}
project(':valid_setup_no_base') {
test {
include "**/*IT.class"
include "**/*Tests.class"
}
test {
include "**/*IT.class"
include "**/*Tests.class"
}
}
project(':tests_in_main') {
}
project (':valid_setup_with_base') {
test {
include "**/*IT.class"
include "**/*Tests.class"
project(':valid_setup_with_base') {
test {
include "**/*IT.class"
include "**/*Tests.class"
}
testingConventions.naming {
Tests {
baseClass 'org.elasticsearch.gradle.testkit.Unit'
}
testingConventions.naming {
Tests {
baseClass 'org.elasticsearch.gradle.testkit.Unit'
}
IT {
baseClass 'org.elasticsearch.gradle.testkit.Integration'
}
IT {
baseClass 'org.elasticsearch.gradle.testkit.Integration'
}
}
}

@@ -1,39 +1,39 @@
import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask
plugins {
id 'java'
// bring in build-tools onto the classpath
id 'elasticsearch.global-build-info' apply false
id 'java'
// bring in build-tools onto the classpath
id 'elasticsearch.global-build-info' apply false
}
repositories {
/**
* Local test repo contains dummy jars with different group names and versions.
* - broken-log4j creates a log4j logger but has no pom, so the class will be missing
* - dummy-io has a class that creates a new java.io.File ( something which third-party-audit-absurd.txt forbids )
* - version 0.0.2 has the same class and one extra file just to make the jar different
*/
maven {
name = "local-test"
url = file("sample_jars/build/testrepo")
}
jcenter()
/**
* Local test repo contains dummy jars with different group names and versions.
* - broken-log4j creates a log4j logger but has no pom, so the class will be missing
* - dummy-io has a class that creates a new java.io.File ( something which third-party-audit-absurd.txt forbids )
* - version 0.0.2 has the same class and one extra file just to make the jar different
*/
maven {
name = "local-test"
url = file("sample_jars/build/testrepo")
}
jcenter()
}
configurations.create("forbiddenApisCliJar")
dependencies {
forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.7'
compileOnly "org.${project.properties.compileOnlyGroup}:${project.properties.compileOnlyVersion}"
compile "org.${project.properties.compileGroup}:${project.properties.compileVersion}"
forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.7'
compileOnly "org.${project.properties.compileOnlyGroup}:${project.properties.compileOnlyVersion}"
compile "org.${project.properties.compileGroup}:${project.properties.compileVersion}"
}
task empty(type: ThirdPartyAuditTask) {
targetCompatibility = JavaVersion.VERSION_11
signatureFile = file('third-party-audit-empty.txt')
targetCompatibility = JavaVersion.VERSION_11
signatureFile = file('third-party-audit-empty.txt')
}
task absurd(type: ThirdPartyAuditTask) {
targetCompatibility = JavaVersion.VERSION_11
signatureFile = file('third-party-audit-absurd.txt')
}
targetCompatibility = JavaVersion.VERSION_11
signatureFile = file('third-party-audit-absurd.txt')
}

@@ -1,52 +1,52 @@
plugins {
id 'java'
id 'java'
}
repositories {
mavenCentral()
mavenCentral()
}
dependencies {
compile 'org.apache.logging.log4j:log4j-core:2.11.1'
compile 'org.apache.logging.log4j:log4j-core:2.11.1'
}
// Tests have to clean mid-test but we don't want the sample jars to go away
clean.enabled = false
["0.0.1", "0.0.2"].forEach { v ->
["elasticsearch", "other"].forEach { p ->
task "dummy-${p}-${v}"(type: Jar) {
destinationDir = file("${buildDir}/testrepo/org/${p}/gradle/dummy-io/${v}/")
archiveName = "dummy-io-${v}.jar"
from sourceSets.main.output
include "**/TestingIO.class"
if (v == "0.0.2") {
manifest {
attributes(
"X-Different": "Different manifest, different jar"
)
}
}
["elasticsearch", "other"].forEach { p ->
task "dummy-${p}-${v}"(type: Jar) {
destinationDir = file("${buildDir}/testrepo/org/${p}/gradle/dummy-io/${v}/")
archiveName = "dummy-io-${v}.jar"
from sourceSets.main.output
include "**/TestingIO.class"
if (v == "0.0.2") {
manifest {
attributes(
"X-Different": "Different manifest, different jar"
)
}
build.dependsOn("dummy-${p}-${v}")
}
}
build.dependsOn("dummy-${p}-${v}")
}
}
["0.0.1"].forEach { v ->
["elasticsearch", "other"].forEach { p ->
task "broken-log4j-${p}-${v}"(type: Jar) {
destinationDir = file("${buildDir}/testrepo/org/${p}/gradle/broken-log4j/${v}/")
archiveName = "broken-log4j-${v}.jar"
from sourceSets.main.output
include "**/TestingLog4j.class"
}
build.dependsOn("broken-log4j-${p}-${v}")
["elasticsearch", "other"].forEach { p ->
task "broken-log4j-${p}-${v}"(type: Jar) {
destinationDir = file("${buildDir}/testrepo/org/${p}/gradle/broken-log4j/${v}/")
archiveName = "broken-log4j-${v}.jar"
from sourceSets.main.output
include "**/TestingLog4j.class"
}
build.dependsOn("broken-log4j-${p}-${v}")
}
}
task jarhellJdk(type: Jar) {
destinationDir = file("${buildDir}/testrepo/org/other/gradle/jarhellJdk/0.0.1/")
archiveName = "jarhellJdk-0.0.1.jar"
from sourceSets.main.output
include "**/String.class"
into "java/lang"
build.dependsOn("jarhellJdk")
}
destinationDir = file("${buildDir}/testrepo/org/other/gradle/jarhellJdk/0.0.1/")
archiveName = "jarhellJdk-0.0.1.jar"
from sourceSets.main.output
include "**/String.class"
into "java/lang"
build.dependsOn("jarhellJdk")
}

@@ -41,9 +41,9 @@ configurations {
idea {
module {
if (scopes.TEST != null) {
scopes.TEST.plus.add(project.configurations.restSpec)
}
if (scopes.TEST != null) {
scopes.TEST.plus.add(project.configurations.restSpec)
}
}
}
@@ -65,7 +65,7 @@ dependencies {
// Needed for serialization tests:
// (In order to serialize a server side class to a client side class or the other way around)
testCompile(project(':x-pack:plugin:core')) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-rest-high-level-client'
exclude group: 'org.elasticsearch', module: 'elasticsearch-rest-high-level-client'
}
restSpec project(':rest-api-spec')
@@ -136,8 +136,8 @@ testClusters.all {
keystore 'xpack.security.transport.ssl.truststore.secure_password', 'testnode'
extraConfigFile 'roles.yml', file('roles.yml')
user username: System.getProperty('tests.rest.cluster.username', 'test_user'),
password: System.getProperty('tests.rest.cluster.password', 'test-password'),
role: System.getProperty('tests.rest.cluster.role', 'admin')
password: System.getProperty('tests.rest.cluster.password', 'test-password'),
role: System.getProperty('tests.rest.cluster.role', 'admin')
user username: 'admin_user', password: 'admin-password'
extraConfigFile nodeCert.name, nodeCert

@@ -54,7 +54,7 @@ dependencies {
tasks.withType(CheckForbiddenApis) {
//client does not depend on server, so only jdk and http signatures should be checked
replaceSignatureFiles ('jdk-signatures', 'http-signatures')
replaceSignatureFiles('jdk-signatures', 'http-signatures')
}
forbiddenPatterns {
@@ -69,7 +69,7 @@ forbiddenApisTest {
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
jarHell.enabled=false
jarHell.enabled = false
testingConventions {
naming.clear()
@@ -80,7 +80,7 @@ testingConventions {
}
}
thirdPartyAudit.ignoreMissingClasses (
thirdPartyAudit.ignoreMissingClasses(
//commons-logging optional dependencies
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',

@@ -69,7 +69,7 @@ dependencyLicenses {
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
jarHell.enabled=false
jarHell.enabled = false
testingConventions {
naming.clear()
@@ -87,7 +87,7 @@ dependencyLicenses {
}
}
thirdPartyAudit.ignoreMissingClasses (
thirdPartyAudit.ignoreMissingClasses(
//commons-logging optional dependencies
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',

@@ -45,7 +45,7 @@ forbiddenApisTest {
// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:elasticsearch-core
jarHell.enabled=false
jarHell.enabled = false
// TODO: should we have licenses for our test deps?
dependencyLicenses.enabled = false

@@ -29,6 +29,7 @@ import org.elasticsearch.gradle.tar.SymbolicLinkPreservingTar
import java.nio.file.Files
import java.nio.file.Path
// need this so Zip/Tar tasks get basic defaults...
apply plugin: 'base'
@@ -40,7 +41,7 @@ task createLogsDir(type: EmptyDirTask) {
dir = "${logsDir}"
dirMode = 0755
}
ext.pluginsDir= new File(buildDir, 'plugins-hack/plugins')
ext.pluginsDir = new File(buildDir, 'plugins-hack/plugins')
task createPluginsDir(type: EmptyDirTask) {
dir = "${pluginsDir}"
dirMode = 0755

@@ -27,7 +27,7 @@ integTest.runner {
*/
if (System.getProperty("tests.rest.cluster") == null) {
nonInputProperties.systemProperty 'tests.logfile',
"${ -> testClusters.integTest.singleNode().getServerLog()}"
"${-> testClusters.integTest.singleNode().getServerLog()}"
} else {
systemProperty 'tests.logfile', '--external--'
}

@@ -36,9 +36,9 @@ apply plugin: 'elasticsearch.testclusters'
// Concatenates the dependencies CSV files into a single file
task generateDependenciesReport(type: ConcatFilesTask) {
files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' )
files = fileTree(dir: project.rootDir, include: '**/dependencies.csv')
headerLine = "name,version,url,license"
target = new File(System.getProperty('csv')?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
target = new File(System.getProperty('csv') ?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
}
/*****************************************************************************
@@ -138,7 +138,7 @@ void copyModule(Sync copyTask, Project module) {
exclude 'config/log4j2.properties'
eachFile { details ->
String name = module.plugins.hasPlugin('elasticsearch.esplugin') ? module.esplugin.name : module.es_meta_plugin.name
String name = module.plugins.hasPlugin('elasticsearch.esplugin') ? module.esplugin.name : module.es_meta_plugin.name
// Copy all non config/bin files
// Note these might be under a subdirectory in the case of a meta plugin
if ((details.relativePath.pathString ==~ /([^\/]+\/)?(config|bin)\/.*/) == false) {
@@ -228,7 +228,7 @@ Project xpack = project(':x-pack:plugin')
xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule ->
File licenses = new File(xpackModule.projectDir, 'licenses')
if (licenses.exists()) {
buildDefaultNotice.licensesDir licenses
buildDefaultNotice.licensesDir licenses
}
copyModule(processDefaultOutputs, xpackModule)
copyLog4jProperties(buildDefaultLog4jConfig, xpackModule)
@@ -295,9 +295,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
List excludePlatforms = ['linux', 'windows', 'darwin']
if (platform != null) {
excludePlatforms.remove(excludePlatforms.indexOf(platform))
excludePlatforms.remove(excludePlatforms.indexOf(platform))
} else {
excludePlatforms = []
excludePlatforms = []
}
from(buildModules) {
for (String excludePlatform : excludePlatforms) {
@@ -334,7 +334,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
binFiles = { distributionType, oss, jdk ->
copySpec {
// non-windows files, for all distributions
// non-windows files, for all distributions
with copySpec {
from '../src/bin'
exclude '*.exe'
@@ -384,7 +384,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
} else {
from buildDefaultNoJdkNotice
}
}
}
}
}
@@ -485,10 +485,10 @@ subprojects {
licenseText = rootProject.file('licenses/ELASTIC-LICENSE.txt').getText('UTF-8')
}
// license text needs to be indented with a single space
licenseText = ' ' + licenseText.replace('\n', '\n ')
licenseText = ' ' + licenseText.replace('\n', '\n ')
String footer = "# Built for ${project.name}-${project.version} " +
"(${distributionType})"
"(${distributionType})"
Map<String, Object> expansions = [
'project.name': project.name,
'project.version': version,
@@ -511,9 +511,9 @@ subprojects {
'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; done',
],
'source.path.env': [
'deb': 'source /etc/default/elasticsearch',
'rpm': 'source /etc/sysconfig/elasticsearch',
'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
'deb': 'source /etc/default/elasticsearch',
'rpm': 'source /etc/sysconfig/elasticsearch',
'def': 'if [ -z "$ES_PATH_CONF" ]; then ES_PATH_CONF="$ES_HOME"/config; fi',
],
'path.logs': [
'deb': packagingPathLogs,
@@ -590,7 +590,7 @@ subprojects {
return result
}
ext.assertLinesInFile = { Path path, List<String> expectedLines ->
ext.assertLinesInFile = { Path path, List<String> expectedLines ->
final List<String> actualLines = Files.readAllLines(path)
int line = 0
for (final String expectedLine : expectedLines) {
@@ -603,12 +603,12 @@
}
}
['archives:windows-zip','archives:oss-windows-zip',
'archives:darwin-tar','archives:oss-darwin-tar',
'archives:linux-tar', 'archives:oss-linux-tar',
'archives:integ-test-zip',
'packages:rpm', 'packages:deb',
'packages:oss-rpm', 'packages:oss-deb',
['archives:windows-zip', 'archives:oss-windows-zip',
'archives:darwin-tar', 'archives:oss-darwin-tar',
'archives:linux-tar', 'archives:oss-linux-tar',
'archives:integ-test-zip',
'packages:rpm', 'packages:deb',
'packages:oss-rpm', 'packages:oss-deb',
].forEach { subName ->
Project subproject = project("${project.path}:${subName}")
Configuration configuration = configurations.create(subproject.name)

@@ -35,7 +35,8 @@ import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
* unreleased versions are when Gradle projects are set up, so we use "build-unreleased-version-*" as placeholders
* and configure them to build various versions here.
*/
bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleasedVersion -> project("${unreleasedVersion.gradleProjectPath}") {
bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleasedVersion ->
project("${unreleasedVersion.gradleProjectPath}") {
Version bwcVersion = unreleasedVersion.version
String bwcBranch = unreleasedVersion.branch
apply plugin: 'distribution'
@@ -49,196 +50,196 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
boolean gitFetchLatest
final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
if ("true".equals(gitFetchLatestProperty)) {
gitFetchLatest = true
gitFetchLatest = true
} else if ("false".equals(gitFetchLatestProperty)) {
gitFetchLatest = false
gitFetchLatest = false
} else {
throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]")
}
task createClone(type: LoggedExec) {
onlyIf { checkoutDir.exists() == false }
commandLine = ['git', 'clone', rootDir, checkoutDir]
onlyIf { checkoutDir.exists() == false }
commandLine = ['git', 'clone', rootDir, checkoutDir]
}
task findRemote(type: LoggedExec) {
dependsOn createClone
workingDir = checkoutDir
commandLine = ['git', 'remote', '-v']
ByteArrayOutputStream output = new ByteArrayOutputStream()
standardOutput = output
doLast {
project.ext.remoteExists = false
output.toString('UTF-8').eachLine {
if (it.contains("${remote}\t")) {
project.ext.remoteExists = true
}
}
dependsOn createClone
workingDir = checkoutDir
commandLine = ['git', 'remote', '-v']
ByteArrayOutputStream output = new ByteArrayOutputStream()
standardOutput = output
doLast {
project.ext.remoteExists = false
output.toString('UTF-8').eachLine {
if (it.contains("${remote}\t")) {
project.ext.remoteExists = true
}
}
}
}
task addRemote(type: LoggedExec) {
dependsOn findRemote
onlyIf { project.ext.remoteExists == false }
workingDir = checkoutDir
commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"]
dependsOn findRemote
onlyIf { project.ext.remoteExists == false }
workingDir = checkoutDir
commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"]
}
task fetchLatest(type: LoggedExec) {
onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
dependsOn addRemote
workingDir = checkoutDir
commandLine = ['git', 'fetch', '--all']
onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest }
dependsOn addRemote
workingDir = checkoutDir
commandLine = ['git', 'fetch', '--all']
}
Closure execGit = { Action<ExecSpec> action ->
new ByteArrayOutputStream().withStream { os ->
ExecResult result = project.exec { spec ->
workingDir = checkoutDir
standardOutput os
action.execute(spec)
}
result.assertNormalExitValue()
return os.toString().trim()
Closure execGit = { Action<ExecSpec> action ->
new ByteArrayOutputStream().withStream { os ->
ExecResult result = project.exec { spec ->
workingDir = checkoutDir
standardOutput os
action.execute(spec)
}
result.assertNormalExitValue()
return os.toString().trim()
}
}
task checkoutBwcBranch() {
dependsOn fetchLatest
doLast {
String refspec = System.getProperty("bwc.refspec.${bwcBranch}") ?: System.getProperty("tests.bwc.refspec.${bwcBranch}") ?: "${remote}/${bwcBranch}"
if (System.getProperty("bwc.checkout.align") != null) {
/*
We use a time based approach to make the bwc versions built deterministic and compatible with the current hash.
Most of the time we want to test against latest, but when running delayed exhaustive tests or wanting
reproducible builds we want this to be deterministic by using a hash that was the latest when the current
commit was made.
dependsOn fetchLatest
doLast {
String refspec = System.getProperty("bwc.refspec.${bwcBranch}") ?: System.getProperty("tests.bwc.refspec.${bwcBranch}") ?: "${remote}/${bwcBranch}"
if (System.getProperty("bwc.checkout.align") != null) {
/*
We use a time based approach to make the bwc versions built deterministic and compatible with the current hash.
Most of the time we want to test against latest, but when running delayed exhaustive tests or wanting
reproducible builds we want this to be deterministic by using a hash that was the latest when the current
commit was made.
This approach doesn't work with merge commits as these can introduce commits in the chronological order
after the fact e.g. a merge done today can add commits dated yesterday so the result will no longer be
deterministic.
This approach doesn't work with merge commits as these can introduce commits in the chronological order
after the fact e.g. a merge done today can add commits dated yesterday so the result will no longer be
deterministic.
We don't use merge commits, but for additional safety we check that no such commits exist in the time period
we are interested in.
We don't use merge commits, but for additional safety we check that no such commits exist in the time period
we are interested in.
Timestamps are at seconds resolution. rev-parse --before and --after are inclusive w.r.t the second
passed as input. This means the results might not be deterministic in the current second, but this
should not matter in practice.
*/
String timeOfCurrent = execGit { spec ->
spec.commandLine 'git', 'show', '--no-patch', '--no-notes', "--pretty='%cD'"
spec.workingDir project.rootDir
}
logger.lifecycle("Commit date of current: {}", timeOfCurrent)
String mergeCommits = execGit { spec ->
spec.commandLine "git", "rev-list", refspec, "--after", timeOfCurrent, "--merges"
}
if (mergeCommits.isEmpty() == false) {
throw new IllegalStateException(
"Found the following merge commits which prevent determining bwc commits: " + mergeCommits
)
}
refspec = execGit { spec ->
spec.commandLine "git", "rev-list", refspec, "-n", "1", "--before", timeOfCurrent, "--date-order"
}
}
logger.lifecycle("Performing checkout of ${refspec}...")
LoggedExec.exec(project) { spec ->
spec.workingDir = checkoutDir
spec.commandLine "git", "checkout", refspec
}
String checkoutHash = GlobalBuildInfoPlugin.gitRevision(checkoutDir)
logger.lifecycle("Checkout hash for ${project.path} is ${checkoutHash}")
file("${project.buildDir}/refspec").text = checkoutHash
Timestamps are at seconds resolution. rev-parse --before and --after are inclusive w.r.t the second
passed as input. This means the results might not be deterministic in the current second, but this
should not matter in practice.
*/
String timeOfCurrent = execGit { spec ->
spec.commandLine 'git', 'show', '--no-patch', '--no-notes', "--pretty='%cD'"
spec.workingDir project.rootDir
}
logger.lifecycle("Commit date of current: {}", timeOfCurrent)
String mergeCommits = execGit { spec ->
spec.commandLine "git", "rev-list", refspec, "--after", timeOfCurrent, "--merges"
}
if (mergeCommits.isEmpty() == false) {
throw new IllegalStateException(
"Found the following merge commits which prevent determining bwc commits: " + mergeCommits
)
}
refspec = execGit { spec ->
spec.commandLine "git", "rev-list", refspec, "-n", "1", "--before", timeOfCurrent, "--date-order"
}
}
logger.lifecycle("Performing checkout of ${refspec}...")
LoggedExec.exec(project) { spec ->
spec.workingDir = checkoutDir
spec.commandLine "git", "checkout", refspec
}
String checkoutHash = GlobalBuildInfoPlugin.gitRevision(checkoutDir)
logger.lifecycle("Checkout hash for ${project.path} is ${checkoutHash}")
file("${project.buildDir}/refspec").text = checkoutHash
}
}
Closure createRunBwcGradleTask = { name, extraConfig ->
return tasks.create(name: "$name", type: LoggedExec) {
dependsOn checkoutBwcBranch
spoolOutput = true
workingDir = checkoutDir
doFirst {
// Execution time so that the checkouts are available
List<String> lines = file("${checkoutDir}/.ci/java-versions.properties").readLines()
environment(
'JAVA_HOME',
getJavaHome(it, Integer.parseInt(
lines
.findAll({ it.startsWith("ES_BUILD_JAVA=") })
.collect({ it.replace("ES_BUILD_JAVA=java", "").trim() })
.collect({ it.replace("ES_BUILD_JAVA=openjdk", "").trim() })
.join("!!")
))
)
environment(
'RUNTIME_JAVA_HOME',
getJavaHome(it, Integer.parseInt(
lines
.findAll({ it.startsWith("ES_RUNTIME_JAVA=java") })
.collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() })
.join("!!")
))
)
}
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'cmd'
args '/C', 'call', new File(checkoutDir, 'gradlew').toString()
} else {
executable new File(checkoutDir, 'gradlew').toString()
}
if (gradle.startParameter.isOffline()) {
args "--offline"
}
args "-Dbuild.snapshot=true"
final LogLevel logLevel = gradle.startParameter.logLevel
if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG].contains(logLevel)) {
args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}"
}
final String showStacktraceName = gradle.startParameter.showStacktrace.name()
assert ["INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL"].contains(showStacktraceName)
if (showStacktraceName.equals("ALWAYS")) {
args "--stacktrace"
} else if (showStacktraceName.equals("ALWAYS_FULL")) {
args "--full-stacktrace"
}
if (gradle.getStartParameter().isParallelProjectExecutionEnabled()) {
args "--parallel"
}
standardOutput = new IndentingOutputStream(System.out, bwcVersion)
errorOutput = new IndentingOutputStream(System.err, bwcVersion)
configure extraConfig
return tasks.create(name: "$name", type: LoggedExec) {
dependsOn checkoutBwcBranch
spoolOutput = true
workingDir = checkoutDir
doFirst {
// Execution time so that the checkouts are available
List<String> lines = file("${checkoutDir}/.ci/java-versions.properties").readLines()
environment(
'JAVA_HOME',
getJavaHome(it, Integer.parseInt(
lines
.findAll({ it.startsWith("ES_BUILD_JAVA=") })
.collect({ it.replace("ES_BUILD_JAVA=java", "").trim() })
.collect({ it.replace("ES_BUILD_JAVA=openjdk", "").trim() })
.join("!!")
))
)
environment(
'RUNTIME_JAVA_HOME',
getJavaHome(it, Integer.parseInt(
lines
.findAll({ it.startsWith("ES_RUNTIME_JAVA=java") })
.collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() })
.join("!!")
))
)
}
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'cmd'
args '/C', 'call', new File(checkoutDir, 'gradlew').toString()
} else {
executable new File(checkoutDir, 'gradlew').toString()
}
if (gradle.startParameter.isOffline()) {
args "--offline"
}
args "-Dbuild.snapshot=true"
final LogLevel logLevel = gradle.startParameter.logLevel
if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG].contains(logLevel)) {
args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}"
}
final String showStacktraceName = gradle.startParameter.showStacktrace.name()
assert ["INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL"].contains(showStacktraceName)
if (showStacktraceName.equals("ALWAYS")) {
args "--stacktrace"
} else if (showStacktraceName.equals("ALWAYS_FULL")) {
args "--full-stacktrace"
}
if (gradle.getStartParameter().isParallelProjectExecutionEnabled()) {
args "--parallel"
}
standardOutput = new IndentingOutputStream(System.out, bwcVersion)
errorOutput = new IndentingOutputStream(System.err, bwcVersion)
configure extraConfig
}
}
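For clarity, a standalone Groovy sketch of the .ci/java-versions.properties parsing performed in doFirst above, with hypothetical property lines:

// Hypothetical contents; the real lines come from ${checkoutDir}/.ci/java-versions.properties
List<String> lines = ['ES_BUILD_JAVA=java11', 'ES_RUNTIME_JAVA=java8']
int buildJava = Integer.parseInt(
  lines
    .findAll({ it.startsWith('ES_BUILD_JAVA=') })
    .collect({ it.replace('ES_BUILD_JAVA=java', '').trim() })
    .collect({ it.replace('ES_BUILD_JAVA=openjdk', '').trim() })
    .join('!!') // join('!!') makes Integer.parseInt fail loudly if more than one line matches
)
assert buildJava == 11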
Closure buildBwcTaskName = { projectName ->
return "buildBwc${projectName.replaceAll(/-\w/) { it[1].toUpperCase() }.capitalize()}"
}
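As a quick illustration of the name mangling (standalone Groovy, hypothetical project names):

def buildBwcTaskName = { projectName ->
  "buildBwc${projectName.replaceAll(/-\w/) { it[1].toUpperCase() }.capitalize()}"
}
assert buildBwcTaskName('zip') == 'buildBwcZip'        // no dash: just capitalized
assert buildBwcTaskName('oss-deb') == 'buildBwcOssDeb' // '-d' is folded into 'D'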
task buildBwc {}
Closure createBuildBwcTask = { projectName, projectDir, projectArtifact ->
Task bwcTask = createRunBwcGradleTask(buildBwcTaskName(projectName)) {
inputs.file("${project.buildDir}/refspec")
outputs.files(projectArtifact)
outputs.cacheIf("BWC distribution caching is disabled on 'master' branch") {
// Don't bother caching in 'master' since the BWC branches move too quickly to make this cost worthwhile
BuildParams.ci && System.getenv('GIT_BRANCH')?.endsWith("master") == false
}
args ":${projectDir.replace('/', ':')}:assemble"
if (project.gradle.startParameter.buildCacheEnabled) {
args "--build-cache"
}
doLast {
if (projectArtifact.exists() == false) {
throw new InvalidUserDataException("Building ${bwcVersion} didn't generate expected file ${projectArtifact}")
}
}
}
buildBwc.dependsOn bwcTask
}
Map<String, File> artifactFiles = [:]
@ -251,90 +252,91 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
}
for (String projectName : projects) {
String baseDir = "distribution"
String classifier = ""
String extension = projectName
if (bwcVersion.onOrAfter('7.0.0') && (projectName.contains('zip') || projectName.contains('tar'))) {
int index = projectName.indexOf('-')
classifier = "-${projectName.substring(0, index)}-x86_64"
extension = projectName.substring(index + 1)
if (extension.equals('tar')) {
extension += '.gz'
}
String baseDir = "distribution"
String classifier = ""
String extension = projectName
if (bwcVersion.onOrAfter('7.0.0') && (projectName.contains('zip') || projectName.contains('tar'))) {
int index = projectName.indexOf('-')
classifier = "-${projectName.substring(0, index)}-x86_64"
extension = projectName.substring(index + 1)
if (extension.equals('tar')) {
extension += '.gz'
}
if (bwcVersion.onOrAfter('7.0.0') && projectName.contains('deb')) {
classifier = "-amd64"
}
if (bwcVersion.onOrAfter('7.0.0') && projectName.contains('rpm')) {
classifier = "-x86_64"
}
if (bwcVersion.onOrAfter('6.3.0')) {
baseDir += projectName.endsWith('zip') || projectName.endsWith('tar') ? '/archives' : '/packages'
// add oss variant first
projectDirs.add("${baseDir}/oss-${projectName}")
File ossProjectArtifact = file("${checkoutDir}/${baseDir}/oss-${projectName}/build/distributions/elasticsearch-oss-${bwcVersion}-SNAPSHOT${classifier}.${extension}")
artifactFiles.put("oss-" + projectName, ossProjectArtifact)
createBuildBwcTask("oss-${projectName}", "${baseDir}/oss-${projectName}", ossProjectArtifact)
}
projectDirs.add("${baseDir}/${projectName}")
File projectArtifact = file("${checkoutDir}/${baseDir}/${projectName}/build/distributions/elasticsearch-${bwcVersion}-SNAPSHOT${classifier}.${extension}")
artifactFiles.put(projectName, projectArtifact)
createBuildBwcTask(projectName, "${baseDir}/${projectName}", projectArtifact)
}
if (bwcVersion.onOrAfter('7.0.0') && projectName.contains('deb')) {
classifier = "-amd64"
}
if (bwcVersion.onOrAfter('7.0.0') && projectName.contains('rpm')) {
classifier = "-x86_64"
}
if (bwcVersion.onOrAfter('6.3.0')) {
baseDir += projectName.endsWith('zip') || projectName.endsWith('tar') ? '/archives' : '/packages'
// add oss variant first
projectDirs.add("${baseDir}/oss-${projectName}")
File ossProjectArtifact = file("${checkoutDir}/${baseDir}/oss-${projectName}/build/distributions/elasticsearch-oss-${bwcVersion}-SNAPSHOT${classifier}.${extension}")
artifactFiles.put("oss-" + projectName, ossProjectArtifact)
createBuildBwcTask("oss-${projectName}", "${baseDir}/oss-${projectName}", ossProjectArtifact)
}
projectDirs.add("${baseDir}/${projectName}")
File projectArtifact = file("${checkoutDir}/${baseDir}/${projectName}/build/distributions/elasticsearch-${bwcVersion}-SNAPSHOT${classifier}.${extension}")
artifactFiles.put(projectName, projectArtifact)
createBuildBwcTask(projectName, "${baseDir}/${projectName}", projectArtifact)
}
createRunBwcGradleTask("resolveAllBwcDependencies") {
args 'resolveAllDependencies'
}
Version currentVersion = Version.fromString(version)
if (currentVersion.getMinor() == 0 && currentVersion.getRevision() == 0) {
// We only want to resolve dependencies for live versions of master, without cascading this to older versions
resolveAllDependencies.dependsOn resolveAllBwcDependencies
}
for (e in artifactFiles) {
String projectName = e.key
String buildBwcTask = buildBwcTaskName(projectName)
File artifactFile = e.value
String artifactFileName = artifactFile.name
String artifactName = artifactFileName.contains('oss') ? 'elasticsearch-oss' : 'elasticsearch'
String suffix = artifactFile.toString()[-3..-1]
int archIndex = artifactFileName.indexOf('x86_64')
String classifier = ''
if (archIndex != -1) {
int osIndex = artifactFileName.lastIndexOf('-', archIndex - 2)
classifier = "${artifactFileName.substring(osIndex + 1, archIndex - 1)}-x86_64"
}
configurations.create(projectName)
artifacts {
it.add(projectName, [file: artifactFile, name: artifactName, classifier: classifier, type: suffix, builtBy: buildBwcTask])
}
}
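A worked example of the classifier and suffix extraction above, for one hypothetical artifact file name:

// Hypothetical file name following the convention used above:
String artifactFileName = 'elasticsearch-oss-7.5.0-SNAPSHOT-linux-x86_64.tar.gz'
String suffix = artifactFileName[-3..-1]                        // '.gz'
int archIndex = artifactFileName.indexOf('x86_64')
int osIndex = artifactFileName.lastIndexOf('-', archIndex - 2)  // the '-' before 'linux'
assert "${artifactFileName.substring(osIndex + 1, archIndex - 1)}-x86_64" == 'linux-x86_64'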
// make sure no dependencies were added to assemble; we want it to be a no-op
assemble.dependsOn = []
}
}
class IndentingOutputStream extends OutputStream {
public final byte[] indent
private final OutputStream delegate
public IndentingOutputStream(OutputStream delegate, Object version) {
this.delegate = delegate
indent = " [${version}] ".getBytes(StandardCharsets.UTF_8)
}
@Override
public void write(int b) {
write([b] as int[], 0, 1)
}
public void write(int[] bytes, int offset, int length) {
for (int i = 0; i < bytes.length; i++) {
delegate.write(bytes[i])
if (bytes[i] == '\n') {
delegate.write(indent)
}
}
}
}
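A minimal usage sketch of the stream above (the version string is hypothetical); after every newline byte the delegate also receives the version tag, which is how bwc builds get their per-version prefix on stdout and stderr:

OutputStream out = new IndentingOutputStream(System.out, '7.5.1') // prefix becomes ' [7.5.1] '
int[] bytes = 'gradlew assemble\n'.getBytes('UTF-8') as int[]
out.write(bytes, 0, bytes.length)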
@ -23,7 +23,7 @@ dependencies {
}
ext.expansions = { oss, ubi, local ->
final String classifier = 'linux-x86_64'
final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
return [
'base_image' : ubi ? 'registry.access.redhat.com/ubi7/ubi-minimal:7.7' : 'centos:7',
@ -38,11 +38,11 @@ ext.expansions = { oss, ubi, local ->
}
private static String buildPath(final boolean oss, final boolean ubi) {
return "build/${ oss ? 'oss-' : ''}${ ubi ? 'ubi-' : ''}docker"
return "build/${oss ? 'oss-' : ''}${ubi ? 'ubi-' : ''}docker"
}
private static String taskName(final String prefix, final boolean oss, final boolean ubi, final String suffix) {
return "${prefix}${oss ? 'Oss' : ''}${ubi ? 'Ubi': ''}${suffix}"
return "${prefix}${oss ? 'Oss' : ''}${ubi ? 'Ubi' : ''}${suffix}"
}
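For reference, these helpers produce task names like the following (standalone sketch, hypothetical arguments):

static String taskName(final String prefix, final boolean oss, final boolean ubi, final String suffix) {
  return "${prefix}${oss ? 'Oss' : ''}${ubi ? 'Ubi' : ''}${suffix}"
}
assert taskName('copy', true, false, 'DockerContext') == 'copyOssDockerContext'
assert taskName('build', false, true, 'DockerImage') == 'buildUbiDockerImage'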
project.ext {
@ -55,7 +55,7 @@ project.ext {
into('config') {
/*
* Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration
* by creating config files in oss or default build-context sub-modules.
*/
from project.projectDir.toPath().resolve("src/docker/config")
if (oss) {
@ -89,7 +89,7 @@ void addCopyDockerContextTask(final boolean oss, final boolean ubi) {
}
}
def createAndSetWritable(Object... locations) {
locations.each { location ->
File file = file(location)
file.mkdirs()
@ -99,7 +99,7 @@ def createAndSetWritable (Object... locations) {
task copyKeystore(type: Sync) {
from project(':x-pack:plugin:core')
.file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
into "${buildDir}/certs"
doLast {
file("${buildDir}/certs").setReadable(true, false)
@ -115,26 +115,26 @@ preProcessFixture {
doLast {
// tests expect to have an empty repo
project.delete(
"${buildDir}/repo",
"${buildDir}/oss-repo"
"${buildDir}/repo",
"${buildDir}/oss-repo"
)
createAndSetWritable(
"${buildDir}/repo",
"${buildDir}/oss-repo",
"${buildDir}/logs/default-1",
"${buildDir}/logs/default-2",
"${buildDir}/logs/oss-1",
"${buildDir}/logs/oss-2"
"${buildDir}/repo",
"${buildDir}/oss-repo",
"${buildDir}/logs/default-1",
"${buildDir}/logs/default-2",
"${buildDir}/logs/oss-1",
"${buildDir}/logs/oss-2"
)
}
}
processTestResources {
from({ zipTree(configurations.restSpec.singleFile) }) {
include 'rest-api-spec/api/**'
}
from project(':x-pack:plugin:core')
.file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
dependsOn configurations.restSpec
}
@ -154,7 +154,7 @@ void addBuildDockerImage(final boolean oss, final boolean ubi) {
dependsOn taskName("copy", oss, ubi, "DockerContext")
List<String> tags
if (oss) {
tags = [
"docker.elastic.co/elasticsearch/elasticsearch-oss${ubi ? '-ubi7' : ''}:${VersionProperties.elasticsearch}",
"elasticsearch-oss${ubi ? '-ubi7' : ''}:test"
]
@ -186,10 +186,10 @@ for (final boolean oss : [false, true]) {
}
// We build the images used in compose locally, but the pull command insists on using a repository
// thus we must disable it to prevent it from doing so.
// Everything will still be pulled since we will build the local images on a pull
if (tasks.findByName("composePull")) {
tasks.composePull.enabled = false
}
/*
@ -218,9 +218,9 @@ subprojects { Project subProject ->
exportDockerImageTask.dependsOn(parent.tasks.getByName(buildTaskName))
artifacts.add('default', file(tarFile)) {
type 'tar'
name "elasticsearch${oss ? '-oss' : ''}${ubi ? '-ubi7' : ''}"
builtBy exportTaskName
}
assemble.dependsOn exportTaskName
@ -1,11 +1,11 @@
apply plugin: 'base'
task buildDockerBuildContext(type: Tar) {
extension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch"
with dockerBuildContext(false, false, false)
}
assemble.dependsOn buildDockerBuildContext
@ -1,11 +1,11 @@
apply plugin: 'base'
task buildOssDockerBuildContext(type: Tar) {
extension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss"
with dockerBuildContext(true, false, false)
}
assemble.dependsOn buildOssDockerBuildContext
@ -1,11 +1,11 @@
apply plugin: 'base'
task buildOssUbiDockerBuildContext(type: Tar) {
extension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-oss-ubi7"
with dockerBuildContext(true, true, false)
}
assemble.dependsOn buildOssUbiDockerBuildContext
@ -1,11 +1,11 @@
apply plugin: 'base'
task buildUbiDockerBuildContext(type: Tar) {
extension = 'tar.gz'
compression = Compression.GZIP
archiveClassifier = "docker-build-context"
archiveBaseName = "elasticsearch-ubi7"
with dockerBuildContext(false, true, false)
}
assemble.dependsOn buildUbiDockerBuildContext
@ -92,6 +92,7 @@ void addProcessFilesTask(String type, boolean oss, boolean jdk) {
}
}
}
addProcessFilesTask('deb', true, true)
addProcessFilesTask('deb', true, false)
addProcessFilesTask('deb', false, true)
@ -111,7 +112,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
}
dependsOn "process${oss ? 'Oss' : ''}${jdk ? '' : 'NoJdk'}${type.capitalize()}Files"
packageName "elasticsearch${oss ? '-oss' : ''}"
arch(type == 'deb' ? 'amd64' : 'X86_64')
// Follow elasticsearch's file naming convention
String jdkString = jdk ? "" : "no-jdk-"
archiveName "${packageName}-${project.version}-${jdkString}${archString}.${type}"
@ -193,16 +194,16 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {
configurationFile '/etc/elasticsearch/users'
configurationFile '/etc/elasticsearch/users_roles'
}
from("${packagingFiles}") {
dirMode 02750
into('/etc')
permissionGroup 'elasticsearch'
includeEmptyDirs true
createDirectoryEntry true
include("elasticsearch") // empty dir, just to add directory entry
}
from("${packagingFiles}/etc/elasticsearch") {
into('/etc/elasticsearch')
from("${packagingFiles}") {
dirMode 02750
into('/etc')
permissionGroup 'elasticsearch'
includeEmptyDirs true
createDirectoryEntry true
include("elasticsearch") // empty dir, just to add directory entry
}
from("${packagingFiles}/etc/elasticsearch") {
into('/etc/elasticsearch')
dirMode 02750
fileMode 0660
permissionGroup 'elasticsearch'
@ -288,8 +289,8 @@ ospackage {
signingKeyId = project.hasProperty('signing.keyId') ? project.property('signing.keyId') : 'D88E42B4'
signingKeyPassphrase = project.property('signing.password')
signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ?
project.file(project.property('signing.secretKeyRingFile')) :
new File(new File(System.getProperty('user.home'), '.gnupg'), 'secring.gpg')
}
// version found on oldest supported distro, centos-6
@ -442,16 +443,16 @@ subprojects {
onlyIf rpmExists
final File rpmDatabase = new File(extractionDir, 'rpm-database')
commandLine 'rpm',
'--badreloc',
'--nodeps',
'--noscripts',
'--notriggers',
'--dbpath',
rpmDatabase,
'--relocate',
"/=${packageExtractionDir}",
'-i',
"${-> buildDist.outputs.files.singleFile}"
}
}
@ -476,8 +477,8 @@ subprojects {
licenseFilename = "ELASTIC-LICENSE.txt"
}
final List<String> header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/",
"Copyright: Elasticsearch B.V. <info@elastic.co>",
"License: " + expectedLicense)
"Copyright: Elasticsearch B.V. <info@elastic.co>",
"License: " + expectedLicense)
final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
final List<String> expectedLines = header + licenseLines.collect { " " + it }
assertLinesInFile(copyrightPath, expectedLines)
@ -4,7 +4,7 @@ targetCompatibility = JavaVersion.VERSION_1_7
// java_version_checker do not depend on core so only JDK signatures should be checked
forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
test.enabled = false
@ -21,25 +21,25 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
apply plugin: 'elasticsearch.build'
dependencies {
compile parent.project('java-version-checker')
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
}
archivesBaseName = 'elasticsearch-launchers'
tasks.withType(CheckForbiddenApis) {
replaceSignatureFiles 'jdk-signatures'
}
testingConventions {
naming.clear()
naming {
Tests {
baseClass 'org.elasticsearch.tools.launchers.LaunchersTestCase'
}
}
}
javadoc.enabled = false
@ -45,7 +45,7 @@ test {
thirdPartyAudit.onlyIf {
// FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
// rather than provide a long list of exclusions, disable the check on FIPS.
BuildParams.inFipsJvm
}
/*
@ -61,14 +61,14 @@ thirdPartyAudit.onlyIf {
*
*/
thirdPartyAudit.ignoreViolations(
'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider$CoreSecureRandom',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$BaseTLSKeyGeneratorSpi',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSKeyMaterialGenerator$2',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSMasterSecretGenerator$2',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSPRFKeyGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator',
'org.bouncycastle.jcajce.provider.ProvSunTLSKDF$TLSRsaPreMasterSecretGenerator$2'
)
@ -23,18 +23,18 @@ apply plugin: 'elasticsearch.docs-test'
/* List of files that have snippets that will not work until platinum tests can occur ... */
buildRestTests.expectedUnconvertedCandidates = [
'reference/ml/anomaly-detection/transforms.asciidoc',
'reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc',
'reference/ml/anomaly-detection/apis/get-bucket.asciidoc',
'reference/ml/anomaly-detection/apis/get-category.asciidoc',
'reference/ml/anomaly-detection/apis/get-influencer.asciidoc',
'reference/ml/anomaly-detection/apis/get-job-stats.asciidoc',
'reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc',
'reference/ml/anomaly-detection/apis/get-record.asciidoc',
'reference/ml/anomaly-detection/apis/get-snapshot.asciidoc',
'reference/ml/anomaly-detection/apis/post-data.asciidoc',
'reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc',
'reference/ml/anomaly-detection/apis/update-snapshot.asciidoc',
]
testClusters.integTest {
@ -116,20 +116,20 @@ Closure setupTwitter = { String name, int count ->
index: twitter
refresh: true
body: |'''
for (int i = 0; i < count; i++) {
String user, text
if (i == 0) {
user = 'kimchy'
text = 'trying out Elasticsearch'
} else {
user = 'test'
text = "some message with the number $i"
}
buildRestTests.setups[name] += """
{"index":{"_id": "$i"}}
{"user": "$user", "message": "$text", "date": "2009-11-15T14:12:12", "likes": $i}"""
}
}
setupTwitter('twitter', 5)
setupTwitter('big_twitter', 120)
setupTwitter('huge_twitter', 1200)
@ -501,7 +501,7 @@ buildRestTests.setups['latency'] = '''
for (int i = 0; i < 100; i++) {
def value = i
if (i % 10) {
value = i * 10
}
buildRestTests.setups['latency'] += """
{"index":{}}
@ -962,7 +962,7 @@ buildRestTests.setups['farequote_datafeed'] = buildRestTests.setups['farequote_j
"job_id":"farequote",
"indexes":"farequote"
}
'''
buildRestTests.setups['server_metrics_index'] = '''
- do:
indices.create:
@ -3,36 +3,36 @@ import java.nio.file.Files
String buildNumber = System.getenv('BUILD_NUMBER')
if (buildNumber) {
File uploadFile = file("build/${buildNumber}.tar.bz2")
project.gradle.buildFinished { result ->
println "build complete, generating: $uploadFile"
if (uploadFile.exists()) {
project.delete(uploadFile)
}
try {
ant.tar(destfile: uploadFile, compression: "bzip2", longfile: "gnu") {
fileset(dir: projectDir) {
fileTree(projectDir)
.include("**/*.hprof")
.include("**/reaper.log")
.include("**/build/testclusters/**")
.exclude("**/build/testclusters/**/data/**")
.exclude("**/build/testclusters/**/distro/**")
.exclude("**/build/testclusters/**/repo/**")
.exclude("**/build/testclusters/**/extract/**")
.filter { Files.isRegularFile(it.toPath()) }
.each {
include(name: projectDir.toPath().relativize(it.toPath()))
}
}
fileset(dir: "${gradle.gradleUserHomeDir}/daemon/${gradle.gradleVersion}", followsymlinks: false) {
include(name: "**/daemon-${ProcessHandle.current().pid()}*.log")
}
}
} catch (Exception e) {
logger.lifecycle("Failed to archive additional logs", e)
}
File uploadFile = file("build/${buildNumber}.tar.bz2")
project.gradle.buildFinished { result ->
println "build complete, generating: $uploadFile"
if (uploadFile.exists()) {
project.delete(uploadFile)
}
try {
ant.tar(destfile: uploadFile, compression: "bzip2", longfile: "gnu") {
fileset(dir: projectDir) {
fileTree(projectDir)
.include("**/*.hprof")
.include("**/reaper.log")
.include("**/build/testclusters/**")
.exclude("**/build/testclusters/**/data/**")
.exclude("**/build/testclusters/**/distro/**")
.exclude("**/build/testclusters/**/repo/**")
.exclude("**/build/testclusters/**/extract/**")
.filter { Files.isRegularFile(it.toPath()) }
.each {
include(name: projectDir.toPath().relativize(it.toPath()))
}
}
fileset(dir: "${gradle.gradleUserHomeDir}/daemon/${gradle.gradleVersion}", followsymlinks: false) {
include(name: "**/daemon-${ProcessHandle.current().pid()}*.log")
}
}
} catch (Exception e) {
logger.lifecycle("Failed to archive additional logs", e)
}
}
}
@ -7,73 +7,73 @@ import java.util.concurrent.TimeUnit
long startTime = project.gradle.services.get(BuildRequestMetaData.class).getStartTime()
buildScan {
  URL jenkinsUrl = System.getenv('JENKINS_URL') ? new URL(System.getenv('JENKINS_URL')) : null
  String buildNumber = System.getenv('BUILD_NUMBER')
  String buildUrl = System.getenv('BUILD_URL')
  String jobName = System.getenv('JOB_NAME')
  String nodeName = System.getenv('NODE_NAME')
  tag OS.current().name()
  if (jobName) {
    value 'Job name', jobName
  }
  if (buildNumber) {
    value 'Job number', buildNumber
  }
  if (jenkinsUrl?.host?.endsWith('elastic.co')) {
    publishAlways()
    buildScan.server = 'https://gradle-enterprise.elastic.co'
  }
  if (nodeName) {
    link 'System logs', "https://infra-stats.elastic.co/app/infra#/logs?" +
      "&logFilter=(expression:'host.name:${nodeName}',kind:kuery)"
    buildFinished {
      link 'System metrics', "https://infra-stats.elastic.co/app/infra#/metrics/host/" +
        "${nodeName}?_g=()&metricTime=(autoReload:!f,refreshInterval:5000," +
        "time:(from:${startTime - TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES)},interval:%3E%3D1m," +
        "to:${System.currentTimeMillis() + TimeUnit.MILLISECONDS.convert(5, TimeUnit.MINUTES)}))"
    }
  }
  // Jenkins-specific build scan metadata
  if (jenkinsUrl) {
    tag 'CI'
    tag jobName
    link 'Jenkins Build', buildUrl
    link 'GCP Upload', "https://console.cloud.google.com/storage/elasticsearch-ci-artifacts/jobs/${jobName}/build/${buildNumber}.tar.bz2"
    System.getenv().getOrDefault('NODE_LABELS', '').split(' ').each {
      value 'Jenkins Worker Label', it
    }
    // Add SCM information
    def isPrBuild = System.getenv('ROOT_BUILD_CAUSE_GHPRBCAUSE') != null
    if (isPrBuild) {
      value 'Git Commit ID', System.getenv('ghprbActualCommit')
      value 'Git Branch', System.getenv('ghprbTargetBranch')
      tag System.getenv('ghprbTargetBranch')
      tag "pr/${System.getenv('ghprbPullId')}"
      tag 'pull-request'
      link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('ghprbActualCommit')}"
      link 'Pull Request', System.getenv('ghprbPullLink')
    } else {
      if (System.getenv('GIT_BRANCH')) {
        def branch = System.getenv('GIT_BRANCH').split('/').last()
        value 'Git Branch', branch
        tag branch
      }
      if (System.getenv('GIT_COMMIT')) {
        value 'Git Commit ID', System.getenv('GIT_COMMIT')
        link 'Source', "https://github.com/elastic/elasticsearch/tree/${System.getenv('GIT_COMMIT')}"
        background {
          def changes = "git diff --name-only ${System.getenv('GIT_PREVIOUS_COMMIT')}..${System.getenv('GIT_COMMIT')}".execute().text.trim()
          value 'Git Changes', changes
        }
      }
    }
  } else {
    tag 'LOCAL'
  }
}
@ -18,29 +18,29 @@
*/
subprojects {
/*
* All subprojects are java projects using Elasticsearch's standard build
* tools.
*/
apply plugin: 'elasticsearch.build'
/*
* Subprojects may depend on the "core" lib but may not depend on any
* other libs. This keeps our dependencies simpler.
*/
project.afterEvaluate {
configurations.all { Configuration conf ->
dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep ->
Project depProject = dep.dependencyProject
if (depProject != null
&& false == depProject.path.equals(':libs:elasticsearch-core')
&& depProject.path.startsWith(':libs')) {
throw new InvalidUserDataException("projects in :libs "
+ "may not depend on other projects libs except "
+ ":libs:elasticsearch-core but "
+ "${project.path} depends on ${depProject.path}")
}
}
}
}
}
@ -22,8 +22,8 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
dependencies {
compile 'net.sf.jopt-simple:jopt-simple:5.0.2'
compile project(':libs:elasticsearch-core')
}
test.enabled = false
@ -31,5 +31,5 @@ test.enabled = false
jarHell.enabled = false
forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
@ -27,85 +27,85 @@ archivesBaseName = 'elasticsearch-core'
// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 9 so we do not include this source set in our IDEs
if (!isEclipse && !isIdea) {
sourceSets {
java9 {
java {
srcDirs = ['src/main/java9']
}
}
}
configurations {
java9Compile.extendsFrom(compile)
}
dependencies {
java9Compile sourceSets.main.output
}
compileJava9Java {
sourceCompatibility = 9
targetCompatibility = 9
}
forbiddenApisJava9 {
rootProject.globalInfo.ready {
if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_1_9) {
targetCompatibility = JavaVersion.VERSION_1_9.getMajorVersion()
}
replaceSignatureFiles 'jdk-signatures'
}
}
jar {
metaInf {
into 'versions/9'
from sourceSets.java9.output
}
manifest.attributes('Multi-Release': 'true')
}
}
publishing {
publications {
nebula {
artifactId = archivesBaseName
}
}
}
dependencies {
// This dependency is used only by :libs:core for null-checking interop with other tools
compileOnly "com.google.code.findbugs:jsr305:3.0.2"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
if (!isEclipse && !isIdea) {
java9Compile sourceSets.main.output
}
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-core'
}
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-core'
}
}
forbiddenApisMain {
// :libs:elasticsearch-core does not depend on server
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to server
replaceSignatureFiles 'jdk-signatures'
}
thirdPartyAudit.ignoreMissingClasses(
// from log4j
'org/osgi/framework/AdaptPermission',
'org/osgi/framework/AdminPermission',
'org/osgi/framework/Bundle',
'org/osgi/framework/BundleActivator',
'org/osgi/framework/BundleContext',
'org/osgi/framework/BundleEvent',
'org/osgi/framework/SynchronousBundleListener',
'org/osgi/framework/wiring/BundleWire',
'org/osgi/framework/wiring/BundleWiring'
)
@ -18,15 +18,15 @@
*/
dependencies {
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-dissect'
}
testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
testCompile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
testCompile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-dissect'
}
testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
testCompile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
testCompile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
}
forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
@ -22,14 +22,14 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
dependencies {
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-geo'
}
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-geo'
}
}
forbiddenApisMain {
// geo does not depend on server
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to core
replaceSignatureFiles 'jdk-signatures'
}
@ -18,15 +18,15 @@
*/
dependencies {
compile 'org.jruby.joni:joni:2.1.29'
// joni dependencies:
compile 'org.jruby.jcodings:jcodings:1.0.44'
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-grok'
}
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-grok'
}
}
forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
@ -27,7 +27,7 @@ dependencies {
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-nio'
}
}
@ -20,29 +20,29 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
dependencies {
// do not add non-test compile dependencies to secure-sm without a good reason to do so
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-secure-sm'
}
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-secure-sm'
}
}
forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
// JAR hell is part of core which we do not want to add as a dependency
jarHell.enabled = false
testingConventions {
naming.clear()
naming {
Tests {
baseClass 'junit.framework.TestCase'
}
}
}
@ -19,25 +19,25 @@
apply plugin: "nebula.maven-scm"
dependencies {
compile project(':libs:elasticsearch-core')
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-ssl-config'
}
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-ssl-config'
}
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
}
forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
forbiddenPatterns {
exclude '**/*.key'
exclude '**/*.pem'
exclude '**/*.p12'
exclude '**/*.jks'
}
@ -22,33 +22,33 @@ apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
dependencies {
compile project(':libs:elasticsearch-core')
compile "org.yaml:snakeyaml:${versions.snakeyaml}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}"
compile "org.yaml:snakeyaml:${versions.snakeyaml}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-x-content'
}
testCompile(project(":test:framework")) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-x-content'
}
}
forbiddenApisMain {
// x-content does not depend on server
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to core
replaceSignatureFiles 'jdk-signatures'
}
thirdPartyAudit.ignoreMissingClasses(
// from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
'com.fasterxml.jackson.databind.ObjectMapper',
)
dependencyLicenses {
@ -18,7 +18,7 @@
*/
esplugin {
description 'Adds aggregations whose input are a list of numeric fields and output includes a matrix.'
classname 'org.elasticsearch.search.aggregations.matrix.MatrixAggregationPlugin'
hasClientJar = true
}
@ -18,11 +18,11 @@
*/
esplugin {
description 'Adds "built in" analyzers to Elasticsearch.'
classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin'
extendedPlugins = ['lang-painless']
description 'Adds "built in" analyzers to Elasticsearch.'
classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin'
extendedPlugins = ['lang-painless']
}
dependencies {
compileOnly project(':modules:lang-painless')
}
@ -19,7 +19,7 @@
configure(subprojects.findAll { it.parent.path == project.path }) {
group = 'org.elasticsearch.plugin' // for modules which publish client jars
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin'
esplugin {
@ -28,12 +28,12 @@ configure(subprojects.findAll { it.parent.path == project.path }) {
}
if (project.file('src/main/packaging').exists()) {
throw new InvalidModelException("Modules cannot contain packaging files")
throw new InvalidModelException("Modules cannot contain packaging files")
}
if (project.file('src/main/bin').exists()) {
throw new InvalidModelException("Modules cannot contain bin files")
throw new InvalidModelException("Modules cannot contain bin files")
}
if (project.file('src/main/config').exists()) {
throw new InvalidModelException("Modules cannot contain config files")
throw new InvalidModelException("Modules cannot contain config files")
}
}
@ -18,13 +18,13 @@
*/
esplugin {
description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources'
classname 'org.elasticsearch.ingest.common.IngestCommonPlugin'
extendedPlugins = ['lang-painless']
}
dependencies {
compileOnly project(':modules:lang-painless')
compile project(':libs:elasticsearch-grok')
compile project(':libs:elasticsearch-dissect')
}
@ -26,7 +26,7 @@ esplugin {
dependencies {
// Upgrade to 2.10.0 or higher when jackson-core gets upgraded to 2.9.x. Blocked by #27032
compile('com.maxmind.geoip2:geoip2:2.9.0')
// geoip2 dependencies:
compile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
compile("com.fasterxml.jackson.core:jackson-databind:2.8.11.3")
@ -36,7 +36,7 @@ dependencies {
}
task copyDefaultGeoIp2DatabaseFiles(type: Copy) {
from { zipTree(configurations.testCompile.files.find { it.name.contains('geolite2-databases') }) }
into "${project.buildDir}/ingest-geoip"
include "*.mmdb"
}
@ -49,7 +49,7 @@ bundlePlugin {
}
}
thirdPartyAudit.ignoreMissingClasses(
// geoip WebServiceClient needs apache http client, but we're not using WebServiceClient:
'org.apache.http.HttpEntity',
'org.apache.http.HttpHost',
@ -17,6 +17,7 @@
* under the License.
*/
import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask;
esplugin {
description 'An easy, safe and fast scripting language for Elasticsearch'
classname 'org.elasticsearch.painless.PainlessPlugin'
@ -75,12 +76,12 @@ dependencies {
testClusters {
generateContextCluster {
testDistribution = 'DEFAULT'
}
}
task generateContextDoc(type: DefaultTestClustersTask) {
useCluster testClusters.generateContextCluster
doFirst {
project.javaexec {
main = 'org.elasticsearch.painless.ContextDocGenerator'
@ -123,9 +124,9 @@ task regenLexer(type: JavaExec) {
systemProperty 'user.country', 'US'
systemProperty 'user.variant', ''
args '-Werror',
'-package', 'org.elasticsearch.painless.antlr',
'-o', outputPath,
"${file(grammarPath)}/PainlessLexer.g4"
}
task regenParser(type: JavaExec) {
@ -137,12 +138,12 @@ task regenParser(type: JavaExec) {
systemProperty 'user.country', 'US'
systemProperty 'user.variant', ''
args '-Werror',
'-package', 'org.elasticsearch.painless.antlr',
'-no-listener',
'-visitor',
// '-Xlog',
'-o', outputPath,
"${file(grammarPath)}/PainlessParser.g4"
}
task regen {
@ -153,20 +154,20 @@ task regen {
ant.move(file: "${outputPath}/PainlessParser.tokens", toDir: grammarPath)
// make the generated classes package private
ant.replaceregexp(match: 'public ((interface|class) \\QPainless\\E\\w+)',
replace: '\\1',
encoding: 'UTF-8') {
fileset(dir: outputPath, includes: 'Painless*.java')
}
// make the lexer abstract
ant.replaceregexp(match: '(class \\QPainless\\ELexer)',
replace: 'abstract \\1',
encoding: 'UTF-8') {
fileset(dir: outputPath, includes: 'PainlessLexer.java')
}
// nuke timestamps/filenames in generated files
ant.replaceregexp(match: '\\Q// Generated from \\E.*',
replace: '\\/\\/ ANTLR GENERATED CODE: DO NOT EDIT',
encoding: 'UTF-8') {
fileset(dir: outputPath, includes: 'Painless*.java')
}
// remove tabs in antlr generated files
@ -18,7 +18,7 @@
*/
esplugin {
description 'Adds advanced field mappers'
classname 'org.elasticsearch.index.mapper.MapperExtrasPlugin'
hasClientJar = true
}
@ -18,13 +18,13 @@
*/
esplugin {
description 'Percolator module adds capability to index queries and query these queries by specifying documents'
classname 'org.elasticsearch.percolator.PercolatorPlugin'
hasClientJar = true
}
dependencies {
testCompile project(path: ':modules:parent-join', configuration: 'runtime')
}
dependencyLicenses {
@ -56,7 +56,7 @@ dependencies {
testCompile project(path: ':modules:parent-join', configuration: 'runtime')
}
thirdPartyAudit.ignoreMissingClasses(
// Commons logging
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
@ -66,8 +66,8 @@ thirdPartyAudit.ignoreMissingClasses (
)
forbiddenPatterns {
// PKCS#12 files are not UTF-8
exclude '**/*.p12'
}
// Support for testing reindex-from-remote against old Elasticsearch versions
@ -117,12 +117,12 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
dependsOn project.configurations.oldesFixture
dependsOn unzip
executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }"
env 'JAVA_HOME', "${ -> getJavaHome(it, 8)}"
env 'CLASSPATH', "${-> project.configurations.oldesFixture.asPath}"
env 'JAVA_HOME', "${-> getJavaHome(it, 8)}"
args 'oldes.OldElasticsearch',
baseDir,
unzip.temporaryDir,
version == '090'
waitCondition = { fixture, ant ->
// the fixture writes the ports file when Elasticsearch's HTTP service
// is ready, so we can just wait for the file to exist
@ -136,8 +136,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
systemProperty "tests.fromOld", "true"
/* Use a closure on the string to delay evaluation until right before we
* run the integration tests so that we can be sure that the file is
* ready. */
nonInputProperties.systemProperty "es${version}.port", "${-> fixture.addressAndPort}"
}
}
}
@ -22,8 +22,8 @@ import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.AntFixture
esplugin {
description 'Module for URL repository'
classname 'org.elasticsearch.plugin.repository.url.URLRepositoryPlugin'
}
// This directory is shared between two URL repositories and one FS repository in YAML integration tests
@ -31,21 +31,23 @@ File repositoryDir = new File(project.buildDir, "shared-repository")
/** A task to start the URLFixture which exposes the repositoryDir over HTTP **/
task urlFixture(type: AntFixture) {
dependsOn testClasses
doFirst {
repositoryDir.mkdirs()
}
env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.repositories.url.URLFixture', baseDir, "${repositoryDir.absolutePath}"
}
integTest {
dependsOn urlFixture
}
testClusters.integTest {
// repositoryDir is used by a FS repository to create snapshots
setting 'path.repo', "${repositoryDir.absolutePath}", PropertyNormalization.IGNORE_VALUE
// repositoryDir is used by two URL repositories to restore snapshots
setting 'repositories.url.allowed_urls', {
"http://snapshot.test*,http://${urlFixture.addressAndPort}"
}, PropertyNormalization.IGNORE_VALUE
}
@ -18,8 +18,8 @@
*/
esplugin {
description 'Integrates Elasticsearch with systemd'
classname 'org.elasticsearch.systemd.SystemdPlugin'
}
integTest.enabled = false
@ -28,22 +28,22 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
* maybe figure out a way to run all tests from core with netty4/network?
*/
esplugin {
description 'Netty 4 based transport implementation'
classname 'org.elasticsearch.transport.Netty4Plugin'
hasClientJar = true
}
compileTestJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked"
dependencies {
// network stack
compile "io.netty:netty-buffer:${versions.netty}"
compile "io.netty:netty-codec:${versions.netty}"
compile "io.netty:netty-codec-http:${versions.netty}"
compile "io.netty:netty-common:${versions.netty}"
compile "io.netty:netty-handler:${versions.netty}"
compile "io.netty:netty-resolver:${versions.netty}"
compile "io.netty:netty-transport:${versions.netty}"
}
dependencyLicenses {
@ -51,19 +51,19 @@ dependencyLicenses {
}
test {
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
integTestRunner {
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
TaskProvider<Test> pooledTest = tasks.register("pooledTest", Test) {
@@ -83,126 +83,126 @@ testClusters.pooledIntegTest {
check.dependsOn(pooledTest, pooledIntegTest)
thirdPartyAudit {
ignoreMissingClasses (
// classes are missing
ignoreMissingClasses(
// classes are missing
// from io.netty.handler.codec.protobuf.ProtobufDecoder (netty)
'com.google.protobuf.ExtensionRegistry',
'com.google.protobuf.MessageLite$Builder',
'com.google.protobuf.MessageLite',
'com.google.protobuf.Parser',
// from io.netty.handler.codec.protobuf.ProtobufDecoder (netty)
'com.google.protobuf.ExtensionRegistry',
'com.google.protobuf.MessageLite$Builder',
'com.google.protobuf.MessageLite',
'com.google.protobuf.Parser',
// from io.netty.logging.CommonsLoggerFactory (netty)
'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory',
// from io.netty.logging.CommonsLoggerFactory (netty)
'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory',
// from Log4j (deliberate, Netty will fallback to Log4j 2)
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
// from Log4j (deliberate, Netty will fallback to Log4j 2)
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
// from io.netty.handler.ssl.OpenSslEngine (netty)
'io.netty.internal.tcnative.Buffer',
'io.netty.internal.tcnative.Library',
'io.netty.internal.tcnative.SSL',
'io.netty.internal.tcnative.SSLContext',
'io.netty.internal.tcnative.SSLPrivateKeyMethod',
// from io.netty.handler.ssl.OpenSslEngine (netty)
'io.netty.internal.tcnative.Buffer',
'io.netty.internal.tcnative.Library',
'io.netty.internal.tcnative.SSL',
'io.netty.internal.tcnative.SSLContext',
'io.netty.internal.tcnative.SSLPrivateKeyMethod',
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
'org.eclipse.jetty.npn.NextProtoNego',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
'org.eclipse.jetty.npn.NextProtoNego',
// from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty)
'org.jboss.marshalling.ByteInput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty)
'org.jboss.marshalling.ByteInput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty)
'org.jboss.marshalling.ByteOutput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty)
'org.jboss.marshalling.ByteOutput',
// from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty)
'org.jboss.marshalling.Marshaller',
// from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty)
'org.jboss.marshalling.Marshaller',
// from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty)
'org.jboss.marshalling.MarshallerFactory',
'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller',
// from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty)
'org.jboss.marshalling.MarshallerFactory',
'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller',
// from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.helpers.FormattingTuple',
'org.slf4j.helpers.MessageFormatter',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.slf4j.spi.LocationAwareLogger',
// from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.helpers.FormattingTuple',
'org.slf4j.helpers.MessageFormatter',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.slf4j.spi.LocationAwareLogger',
'com.google.protobuf.ExtensionRegistryLite',
'com.google.protobuf.MessageLiteOrBuilder',
'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder',
'com.ning.compress.lzf.LZFEncoder',
'com.ning.compress.lzf.util.ChunkDecoderFactory',
'com.ning.compress.lzf.util.ChunkEncoderFactory',
'lzma.sdk.lzma.Encoder',
'net.jpountz.lz4.LZ4Compressor',
'net.jpountz.lz4.LZ4Factory',
'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.XXHash32',
'net.jpountz.xxhash.XXHashFactory',
'io.netty.internal.tcnative.CertificateCallback',
'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher',
'io.netty.internal.tcnative.SSL',
'org.eclipse.jetty.alpn.ALPN$ClientProvider',
'org.eclipse.jetty.alpn.ALPN$ServerProvider',
'org.eclipse.jetty.alpn.ALPN',
'com.google.protobuf.ExtensionRegistryLite',
'com.google.protobuf.MessageLiteOrBuilder',
'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder',
'com.ning.compress.lzf.LZFEncoder',
'com.ning.compress.lzf.util.ChunkDecoderFactory',
'com.ning.compress.lzf.util.ChunkEncoderFactory',
'lzma.sdk.lzma.Encoder',
'net.jpountz.lz4.LZ4Compressor',
'net.jpountz.lz4.LZ4Factory',
'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.XXHash32',
'net.jpountz.xxhash.XXHashFactory',
'io.netty.internal.tcnative.CertificateCallback',
'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher',
'io.netty.internal.tcnative.SSL',
'org.eclipse.jetty.alpn.ALPN$ClientProvider',
'org.eclipse.jetty.alpn.ALPN$ServerProvider',
'org.eclipse.jetty.alpn.ALPN',
'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener'
)
'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener'
)
ignoreViolations (
'io.netty.util.internal.PlatformDependent0',
'io.netty.util.internal.PlatformDependent0$1',
'io.netty.util.internal.PlatformDependent0$2',
'io.netty.util.internal.PlatformDependent0$3',
'io.netty.util.internal.PlatformDependent0$5',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator'
)
ignoreViolations(
'io.netty.util.internal.PlatformDependent0',
'io.netty.util.internal.PlatformDependent0$1',
'io.netty.util.internal.PlatformDependent0$2',
'io.netty.util.internal.PlatformDependent0$3',
'io.netty.util.internal.PlatformDependent0$5',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator'
)
}
rootProject.globalInfo.ready {
if (BuildParams.inFipsJvm == false) {
// BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
// a FIPS JVM with BouncyCastleFIPS Provider
thirdPartyAudit.ignoreMissingClasses(
'org.bouncycastle.asn1.x500.X500Name'
)
}
if (BuildParams.inFipsJvm == false) {
// BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
// a FIPS JVM with BouncyCastleFIPS Provider
thirdPartyAudit.ignoreMissingClasses(
'org.bouncycastle.asn1.x500.X500Name'
)
}
}
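For readers skimming the audit block above: the two knobs serve different purposes. `ignoreMissingClasses` suppresses references to optional dependencies that are deliberately absent from the classpath, while `ignoreViolations` waives findings for classes that reach into JDK internals. Condensed to one entry of each kind, taken from the lists above:

thirdPartyAudit {
  ignoreMissingClasses(
    'org.slf4j.Logger'  // optional logging backend, never on the classpath
  )
  ignoreViolations(
    'io.netty.util.internal.PlatformDependent0'  // uses sun.misc.Unsafe internally
  )
}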

@@ -34,7 +34,7 @@ dependencyLicenses {
mapping from: /morfologik-.*/, to: 'lucene'
}
thirdPartyAudit.ignoreMissingClasses (
thirdPartyAudit.ignoreMissingClasses(
// we don't use the morfologik-fsa polish stemmer
'morfologik.stemming.polish.PolishStemmer'
)

@@ -17,8 +17,8 @@
* under the License.
*/
subprojects {
apply plugin: 'elasticsearch.testclusters'
subprojects {
apply plugin: 'elasticsearch.testclusters'
}
// only configure immediate children of plugins dir
@@ -29,7 +29,7 @@ configure(subprojects.findAll { it.parent.path == project.path }) {
esplugin {
// for local ES plugins, the name of the plugin is the same as the directory
name project.name
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
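The `findAll` predicate above is what restricts configuration to immediate children: a subproject qualifies only when its parent path equals this project's path. A hedged illustration with a hypothetical project tree:

// Given :plugins:foo and :plugins:foo:qa, only ':plugins:foo' satisfies the
// predicate, so nested qa projects are left unconfigured.
def directChildren = subprojects.findAll { it.parent.path == project.path }
logger.info("configuring: ${directChildren*.path}")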

@@ -45,12 +45,12 @@ dependencies {
compile "com.sun.jersey:jersey-client:${versions.jersey}"
compile "com.sun.jersey:jersey-core:${versions.jersey}"
compile "com.sun.jersey:jersey-json:${versions.jersey}"
compile 'org.codehaus.jettison:jettison:1.1'
compile 'org.codehaus.jettison:jettison:1.1'
compile 'com.sun.xml.bind:jaxb-impl:2.2.3-1'
compile 'org.codehaus.jackson:jackson-core-asl:1.9.2'
compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.2'
compile 'org.codehaus.jackson:jackson-jaxrs:1.9.2'
compile 'org.codehaus.jackson:jackson-xc:1.9.2'
compile 'org.codehaus.jackson:jackson-xc:1.9.2'
// HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
// and whitelist this hack in JarHell
@@ -73,14 +73,14 @@ task createKey(type: LoggedExec) {
executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
args '-genkey',
'-alias', 'test-node',
'-keystore', keystore,
'-keyalg', 'RSA',
'-keysize', '2048',
'-validity', '712',
'-dname', 'CN=' + host,
'-keypass', 'keypass',
'-storepass', 'keypass'
'-alias', 'test-node',
'-keystore', keystore,
'-keyalg', 'RSA',
'-keysize', '2048',
'-validity', '712',
'-dname', 'CN=' + host,
'-keypass', 'keypass',
'-storepass', 'keypass'
}
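`LoggedExec` drives `keytool` non-interactively here by streaming the prompt answers through `standardInput`. A hypothetical companion task, not part of this build, that dumps the generated key when debugging cluster startup failures:

task listKey(type: LoggedExec) {
  dependsOn createKey
  executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
  args '-list', '-v',
    '-keystore', keystore,
    '-storepass', 'keypass'
}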
// add keystore to test classpath: it expects it there
@@ -101,38 +101,38 @@ dependencyLicenses {
mapping from: /jaxb-.*/, to: 'jaxb'
}
thirdPartyAudit.ignoreMissingClasses (
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
'org.eclipse.persistence.descriptors.ClassDescriptor',
'org.eclipse.persistence.internal.oxm.MappingNodeValue',
'org.eclipse.persistence.internal.oxm.TreeObjectBuilder',
'org.eclipse.persistence.internal.oxm.XPathFragment',
'org.eclipse.persistence.internal.oxm.XPathNode',
'org.eclipse.persistence.internal.queries.ContainerPolicy',
'org.eclipse.persistence.jaxb.JAXBContext',
'org.eclipse.persistence.jaxb.JAXBHelper',
'org.eclipse.persistence.mappings.DatabaseMapping',
'org.eclipse.persistence.mappings.converters.TypeConversionConverter',
'org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping',
'org.eclipse.persistence.oxm.XMLContext',
'org.eclipse.persistence.oxm.XMLDescriptor',
'org.eclipse.persistence.oxm.XMLField',
'org.eclipse.persistence.oxm.mappings.XMLCompositeCollectionMapping',
'org.eclipse.persistence.sessions.DatabaseSession',
'org.jvnet.fastinfoset.VocabularyApplicationData',
'org.jvnet.staxex.Base64Data',
'org.jvnet.staxex.XMLStreamReaderEx',
'org.jvnet.staxex.XMLStreamWriterEx',
'org.osgi.framework.Bundle',
'org.osgi.framework.BundleActivator',
'org.osgi.framework.BundleContext',
'org.osgi.framework.BundleEvent',
thirdPartyAudit.ignoreMissingClasses(
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
'org.eclipse.persistence.descriptors.ClassDescriptor',
'org.eclipse.persistence.internal.oxm.MappingNodeValue',
'org.eclipse.persistence.internal.oxm.TreeObjectBuilder',
'org.eclipse.persistence.internal.oxm.XPathFragment',
'org.eclipse.persistence.internal.oxm.XPathNode',
'org.eclipse.persistence.internal.queries.ContainerPolicy',
'org.eclipse.persistence.jaxb.JAXBContext',
'org.eclipse.persistence.jaxb.JAXBHelper',
'org.eclipse.persistence.mappings.DatabaseMapping',
'org.eclipse.persistence.mappings.converters.TypeConversionConverter',
'org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping',
'org.eclipse.persistence.oxm.XMLContext',
'org.eclipse.persistence.oxm.XMLDescriptor',
'org.eclipse.persistence.oxm.XMLField',
'org.eclipse.persistence.oxm.mappings.XMLCompositeCollectionMapping',
'org.eclipse.persistence.sessions.DatabaseSession',
'org.jvnet.fastinfoset.VocabularyApplicationData',
'org.jvnet.staxex.Base64Data',
'org.jvnet.staxex.XMLStreamReaderEx',
'org.jvnet.staxex.XMLStreamWriterEx',
'org.osgi.framework.Bundle',
'org.osgi.framework.BundleActivator',
'org.osgi.framework.BundleContext',
'org.osgi.framework.BundleEvent',
'org.osgi.framework.SynchronousBundleListener',
'com.sun.xml.fastinfoset.stax.StAXDocumentParser',
'com.sun.xml.fastinfoset.stax.StAXDocumentParser',
'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer'
)
@@ -140,118 +140,118 @@ thirdPartyAudit.ignoreMissingClasses (
rootProject.globalInfo.ready {
if (BuildParams.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
thirdPartyAudit.ignoreJarHellWithJDK(
'javax.xml.bind.Binder',
'javax.xml.bind.ContextFinder$1',
'javax.xml.bind.ContextFinder',
'javax.xml.bind.DataBindingException',
'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter',
'javax.xml.bind.DatatypeConverterImpl',
'javax.xml.bind.DatatypeConverterInterface',
'javax.xml.bind.Element',
'javax.xml.bind.GetPropertyAction',
'javax.xml.bind.JAXB$Cache',
'javax.xml.bind.JAXB',
'javax.xml.bind.JAXBContext',
'javax.xml.bind.JAXBElement$GlobalScope',
'javax.xml.bind.JAXBElement',
'javax.xml.bind.JAXBException',
'javax.xml.bind.JAXBIntrospector',
'javax.xml.bind.JAXBPermission',
'javax.xml.bind.MarshalException',
'javax.xml.bind.Marshaller$Listener',
'javax.xml.bind.Marshaller',
'javax.xml.bind.Messages',
'javax.xml.bind.NotIdentifiableEvent',
'javax.xml.bind.ParseConversionEvent',
'javax.xml.bind.PrintConversionEvent',
'javax.xml.bind.PropertyException',
'javax.xml.bind.SchemaOutputResolver',
'javax.xml.bind.TypeConstraintException',
'javax.xml.bind.UnmarshalException',
'javax.xml.bind.Unmarshaller$Listener',
'javax.xml.bind.Unmarshaller',
'javax.xml.bind.UnmarshallerHandler',
'javax.xml.bind.ValidationEvent',
'javax.xml.bind.ValidationEventHandler',
'javax.xml.bind.ValidationEventLocator',
'javax.xml.bind.ValidationException',
'javax.xml.bind.Validator',
'javax.xml.bind.WhiteSpaceProcessor',
'javax.xml.bind.annotation.DomHandler',
'javax.xml.bind.annotation.W3CDomHandler',
'javax.xml.bind.annotation.XmlAccessOrder',
'javax.xml.bind.annotation.XmlAccessType',
'javax.xml.bind.annotation.XmlAccessorOrder',
'javax.xml.bind.annotation.XmlAccessorType',
'javax.xml.bind.annotation.XmlAnyAttribute',
'javax.xml.bind.annotation.XmlAnyElement',
'javax.xml.bind.annotation.XmlAttachmentRef',
'javax.xml.bind.annotation.XmlAttribute',
'javax.xml.bind.annotation.XmlElement$DEFAULT',
'javax.xml.bind.annotation.XmlElement',
'javax.xml.bind.annotation.XmlElementDecl$GLOBAL',
'javax.xml.bind.annotation.XmlElementDecl',
'javax.xml.bind.annotation.XmlElementRef$DEFAULT',
'javax.xml.bind.annotation.XmlElementRef',
'javax.xml.bind.annotation.XmlElementRefs',
'javax.xml.bind.annotation.XmlElementWrapper',
'javax.xml.bind.annotation.XmlElements',
'javax.xml.bind.annotation.XmlEnum',
'javax.xml.bind.annotation.XmlEnumValue',
'javax.xml.bind.annotation.XmlID',
'javax.xml.bind.annotation.XmlIDREF',
'javax.xml.bind.annotation.XmlInlineBinaryData',
'javax.xml.bind.annotation.XmlList',
'javax.xml.bind.annotation.XmlMimeType',
'javax.xml.bind.annotation.XmlMixed',
'javax.xml.bind.annotation.XmlNs',
'javax.xml.bind.annotation.XmlNsForm',
'javax.xml.bind.annotation.XmlRegistry',
'javax.xml.bind.annotation.XmlRootElement',
'javax.xml.bind.annotation.XmlSchema',
'javax.xml.bind.annotation.XmlSchemaType$DEFAULT',
'javax.xml.bind.annotation.XmlSchemaType',
'javax.xml.bind.annotation.XmlSchemaTypes',
'javax.xml.bind.annotation.XmlSeeAlso',
'javax.xml.bind.annotation.XmlTransient',
'javax.xml.bind.annotation.XmlType$DEFAULT',
'javax.xml.bind.annotation.XmlType',
'javax.xml.bind.annotation.XmlValue',
'javax.xml.bind.annotation.adapters.CollapsedStringAdapter',
'javax.xml.bind.annotation.adapters.HexBinaryAdapter',
'javax.xml.bind.annotation.adapters.NormalizedStringAdapter',
'javax.xml.bind.annotation.adapters.XmlAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters',
'javax.xml.bind.attachment.AttachmentMarshaller',
'javax.xml.bind.attachment.AttachmentUnmarshaller',
'javax.xml.bind.helpers.AbstractMarshallerImpl',
'javax.xml.bind.helpers.AbstractUnmarshallerImpl',
'javax.xml.bind.helpers.DefaultValidationEventHandler',
'javax.xml.bind.helpers.Messages',
'javax.xml.bind.helpers.NotIdentifiableEventImpl',
'javax.xml.bind.helpers.ParseConversionEventImpl',
'javax.xml.bind.helpers.PrintConversionEventImpl',
'javax.xml.bind.helpers.ValidationEventImpl',
'javax.xml.bind.helpers.ValidationEventLocatorImpl',
'javax.xml.bind.util.JAXBResult',
'javax.xml.bind.util.JAXBSource$1',
'javax.xml.bind.util.JAXBSource',
'javax.xml.bind.util.Messages',
'javax.xml.bind.util.ValidationEventCollector'
'javax.xml.bind.Binder',
'javax.xml.bind.ContextFinder$1',
'javax.xml.bind.ContextFinder',
'javax.xml.bind.DataBindingException',
'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter',
'javax.xml.bind.DatatypeConverterImpl',
'javax.xml.bind.DatatypeConverterInterface',
'javax.xml.bind.Element',
'javax.xml.bind.GetPropertyAction',
'javax.xml.bind.JAXB$Cache',
'javax.xml.bind.JAXB',
'javax.xml.bind.JAXBContext',
'javax.xml.bind.JAXBElement$GlobalScope',
'javax.xml.bind.JAXBElement',
'javax.xml.bind.JAXBException',
'javax.xml.bind.JAXBIntrospector',
'javax.xml.bind.JAXBPermission',
'javax.xml.bind.MarshalException',
'javax.xml.bind.Marshaller$Listener',
'javax.xml.bind.Marshaller',
'javax.xml.bind.Messages',
'javax.xml.bind.NotIdentifiableEvent',
'javax.xml.bind.ParseConversionEvent',
'javax.xml.bind.PrintConversionEvent',
'javax.xml.bind.PropertyException',
'javax.xml.bind.SchemaOutputResolver',
'javax.xml.bind.TypeConstraintException',
'javax.xml.bind.UnmarshalException',
'javax.xml.bind.Unmarshaller$Listener',
'javax.xml.bind.Unmarshaller',
'javax.xml.bind.UnmarshallerHandler',
'javax.xml.bind.ValidationEvent',
'javax.xml.bind.ValidationEventHandler',
'javax.xml.bind.ValidationEventLocator',
'javax.xml.bind.ValidationException',
'javax.xml.bind.Validator',
'javax.xml.bind.WhiteSpaceProcessor',
'javax.xml.bind.annotation.DomHandler',
'javax.xml.bind.annotation.W3CDomHandler',
'javax.xml.bind.annotation.XmlAccessOrder',
'javax.xml.bind.annotation.XmlAccessType',
'javax.xml.bind.annotation.XmlAccessorOrder',
'javax.xml.bind.annotation.XmlAccessorType',
'javax.xml.bind.annotation.XmlAnyAttribute',
'javax.xml.bind.annotation.XmlAnyElement',
'javax.xml.bind.annotation.XmlAttachmentRef',
'javax.xml.bind.annotation.XmlAttribute',
'javax.xml.bind.annotation.XmlElement$DEFAULT',
'javax.xml.bind.annotation.XmlElement',
'javax.xml.bind.annotation.XmlElementDecl$GLOBAL',
'javax.xml.bind.annotation.XmlElementDecl',
'javax.xml.bind.annotation.XmlElementRef$DEFAULT',
'javax.xml.bind.annotation.XmlElementRef',
'javax.xml.bind.annotation.XmlElementRefs',
'javax.xml.bind.annotation.XmlElementWrapper',
'javax.xml.bind.annotation.XmlElements',
'javax.xml.bind.annotation.XmlEnum',
'javax.xml.bind.annotation.XmlEnumValue',
'javax.xml.bind.annotation.XmlID',
'javax.xml.bind.annotation.XmlIDREF',
'javax.xml.bind.annotation.XmlInlineBinaryData',
'javax.xml.bind.annotation.XmlList',
'javax.xml.bind.annotation.XmlMimeType',
'javax.xml.bind.annotation.XmlMixed',
'javax.xml.bind.annotation.XmlNs',
'javax.xml.bind.annotation.XmlNsForm',
'javax.xml.bind.annotation.XmlRegistry',
'javax.xml.bind.annotation.XmlRootElement',
'javax.xml.bind.annotation.XmlSchema',
'javax.xml.bind.annotation.XmlSchemaType$DEFAULT',
'javax.xml.bind.annotation.XmlSchemaType',
'javax.xml.bind.annotation.XmlSchemaTypes',
'javax.xml.bind.annotation.XmlSeeAlso',
'javax.xml.bind.annotation.XmlTransient',
'javax.xml.bind.annotation.XmlType$DEFAULT',
'javax.xml.bind.annotation.XmlType',
'javax.xml.bind.annotation.XmlValue',
'javax.xml.bind.annotation.adapters.CollapsedStringAdapter',
'javax.xml.bind.annotation.adapters.HexBinaryAdapter',
'javax.xml.bind.annotation.adapters.NormalizedStringAdapter',
'javax.xml.bind.annotation.adapters.XmlAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters',
'javax.xml.bind.attachment.AttachmentMarshaller',
'javax.xml.bind.attachment.AttachmentUnmarshaller',
'javax.xml.bind.helpers.AbstractMarshallerImpl',
'javax.xml.bind.helpers.AbstractUnmarshallerImpl',
'javax.xml.bind.helpers.DefaultValidationEventHandler',
'javax.xml.bind.helpers.Messages',
'javax.xml.bind.helpers.NotIdentifiableEventImpl',
'javax.xml.bind.helpers.ParseConversionEventImpl',
'javax.xml.bind.helpers.PrintConversionEventImpl',
'javax.xml.bind.helpers.ValidationEventImpl',
'javax.xml.bind.helpers.ValidationEventLocatorImpl',
'javax.xml.bind.util.JAXBResult',
'javax.xml.bind.util.JAXBSource$1',
'javax.xml.bind.util.JAXBSource',
'javax.xml.bind.util.Messages',
'javax.xml.bind.util.ValidationEventCollector'
)
} else {
thirdPartyAudit.ignoreMissingClasses(
'javax.activation.ActivationDataFlavor',
'javax.activation.DataContentHandler',
'javax.activation.DataHandler',
'javax.activation.DataSource',
'javax.activation.FileDataSource',
'javax.activation.FileTypeMap',
'javax.activation.MimeType',
'javax.activation.MimeTypeParseException',
'javax.activation.ActivationDataFlavor',
'javax.activation.DataContentHandler',
'javax.activation.DataHandler',
'javax.activation.DataSource',
'javax.activation.FileDataSource',
'javax.activation.FileTypeMap',
'javax.activation.MimeType',
'javax.activation.MimeTypeParseException',
)
}
}
}
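The `rootProject.globalInfo.ready` wrapper is load-bearing: `BuildParams.runtimeJavaVersion` is only populated once global build info has been resolved, so JDK-dependent audit rules must be registered inside that callback. The branch above, reduced to one entry per side:

rootProject.globalInfo.ready {
  if (BuildParams.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
    // JDK 8 still bundles JAXB, so the jar on our classpath clashes with the JDK copy
    thirdPartyAudit.ignoreJarHellWithJDK('javax.xml.bind.JAXBContext')
  } else {
    // JDK 9+ removed javax.activation, so those references are simply missing
    thirdPartyAudit.ignoreMissingClasses('javax.activation.DataHandler')
  }
}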

@@ -59,11 +59,11 @@ task writeTestJavaPolicy {
}
final File javaPolicy = file("${tmp}/java.policy")
javaPolicy.write(
[
"grant {",
" permission java.util.PropertyPermission \"com.amazonaws.sdk.ec2MetadataServiceEndpointOverride\", \"write\";",
"};"
].join("\n"))
[
"grant {",
" permission java.util.PropertyPermission \"com.amazonaws.sdk.ec2MetadataServiceEndpointOverride\", \"write\";",
"};"
].join("\n"))
}
}
@@ -71,7 +71,7 @@ test {
dependsOn writeTestJavaPolicy
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name
// this is needed to manipulate com.amazonaws.sdk.ec2MetadataServiceEndpointOverride system property
// it would be better to disable the security manager entirely with `systemProperty 'tests.security.manager', 'false'`
systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy"
@@ -82,7 +82,7 @@ check {
dependsOn 'qa:amazon-ec2:check'
}
thirdPartyAudit.ignoreMissingClasses (
thirdPartyAudit.ignoreMissingClasses(
// classes are missing
'com.amazonaws.jmespath.JmesPathEvaluationVisitor',
'com.amazonaws.jmespath.JmesPathExpression',
@@ -104,18 +104,18 @@ thirdPartyAudit.ignoreMissingClasses (
'software.amazon.ion.system.IonSystemBuilder',
'software.amazon.ion.system.IonTextWriterBuilder',
'software.amazon.ion.system.IonWriterBuilder',
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger'
)
rootProject.globalInfo.ready {
if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
thirdPartyAudit.ignoreMissingClasses(
'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.JAXBContext'
'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.JAXBContext'
)
}
}

@@ -29,18 +29,18 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: ':plugins:discovery-ec2', configuration: 'runtime')
testCompile project(path: ':plugins:discovery-ec2', configuration: 'runtime')
}
final int ec2NumberOfNodes = 3
Map<String, Object> expansions = [
'expected_nodes': ec2NumberOfNodes
'expected_nodes': ec2NumberOfNodes
]
processTestResources {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
}
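`MavenFilteringHack` expands Maven-style `@token@` placeholders in the copied test resources, so a rest test can reference `@expected_nodes@` and observe `3` at runtime; registering the map as task inputs keeps the copy incremental when values change. For comparison only, a sketch of the equivalent stock Gradle mechanism, which this build does not use:

import org.apache.tools.ant.filters.ReplaceTokens

processTestResources {
  // Ant's ReplaceTokens also expands @expected_nodes@, but takes string values
  filter(ReplaceTokens, tokens: ['expected_nodes': '3'])
}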
// disable the default test task, use the specialized ones below
@@ -58,55 +58,55 @@ integTest.enabled = false
* custom Java security policy to work.
*/
['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action ->
AntFixture fixture = tasks.create(name: "ec2Fixture${action}", type: AntFixture) {
dependsOn compileTestJava
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/integTest${action}-1/config/unicast_hosts.txt"
}
AntFixture fixture = tasks.create(name: "ec2Fixture${action}", type: AntFixture) {
dependsOn compileTestJava
env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/integTest${action}-1/config/unicast_hosts.txt"
}
tasks.create(name: "integTest${action}", type: RestIntegTestTask) {
dependsOn fixture, project(':plugins:discovery-ec2').bundlePlugin
}
tasks.create(name: "integTest${action}", type: RestIntegTestTask) {
dependsOn fixture, project(':plugins:discovery-ec2').bundlePlugin
}
check.dependsOn("integTest${action}")
check.dependsOn("integTest${action}")
testClusters."integTest${action}" {
numberOfNodes = ec2NumberOfNodes
plugin file(project(':plugins:discovery-ec2').bundlePlugin.archiveFile)
testClusters."integTest${action}" {
numberOfNodes = ec2NumberOfNodes
plugin file(project(':plugins:discovery-ec2').bundlePlugin.archiveFile)
setting 'discovery.seed_providers', 'ec2'
setting 'network.host', '_ec2_'
setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
setting 'discovery.seed_providers', 'ec2'
setting 'network.host', '_ec2_'
setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
}
systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" }, IGNORE_VALUE
}
}
// Extra config for KeyStore
testClusters.integTestKeyStore {
keystore 'discovery.ec2.access_key', 'ec2_integration_test_access_key'
keystore 'discovery.ec2.secret_key', 'ec2_integration_test_secret_key'
keystore 'discovery.ec2.access_key', 'ec2_integration_test_access_key'
keystore 'discovery.ec2.secret_key', 'ec2_integration_test_secret_key'
}
// Extra config for EnvVariables
testClusters.integTestEnvVariables {
environment 'AWS_ACCESS_KEY_ID', 'ec2_integration_test_access_key'
environment 'AWS_SECRET_ACCESS_KEY', 'ec2_integration_test_secret_key'
environment 'AWS_ACCESS_KEY_ID', 'ec2_integration_test_access_key'
environment 'AWS_SECRET_ACCESS_KEY', 'ec2_integration_test_secret_key'
}
// Extra config for SystemProperties
testClusters.integTestSystemProperties {
systemProperty 'aws.accessKeyId', 'ec2_integration_test_access_key'
systemProperty 'aws.secretKey', 'ec2_integration_test_secret_key'
systemProperty 'aws.accessKeyId', 'ec2_integration_test_access_key'
systemProperty 'aws.secretKey', 'ec2_integration_test_secret_key'
}
// Extra config for ContainerCredentials
ec2FixtureContainerCredentials.env 'ACTIVATE_CONTAINER_CREDENTIALS', true
testClusters.integTestContainerCredentials {
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI',
{ "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE
environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI',
{ "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE
}
// Extra config for InstanceProfile

@@ -32,16 +32,16 @@ check {
test {
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name
systemProperty 'tests.artifact', project.name
}
thirdPartyAudit.ignoreMissingClasses (
thirdPartyAudit.ignoreMissingClasses(
// classes are missing
'com.google.common.base.Splitter',
'com.google.common.collect.Lists',
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'com.google.common.base.Splitter',
'com.google.common.collect.Lists',
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger'
)

@@ -30,41 +30,41 @@ apply plugin: 'elasticsearch.rest-test'
final int gceNumberOfNodes = 3
dependencies {
testCompile project(path: ':plugins:discovery-gce', configuration: 'runtime')
testCompile project(path: ':plugins:discovery-gce', configuration: 'runtime')
}
/** A task to start the GCEFixture which emulates a GCE service **/
task gceFixture(type: AntFixture) {
dependsOn compileTestJava
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, "${buildDir}/testclusters/integTest-1/config/unicast_hosts.txt"
dependsOn compileTestJava
env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, "${buildDir}/testclusters/integTest-1/config/unicast_hosts.txt"
}
Map<String, Object> expansions = [
'expected_nodes': gceNumberOfNodes
'expected_nodes': gceNumberOfNodes
]
processTestResources {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
}
integTest {
dependsOn gceFixture, project(':plugins:discovery-gce').bundlePlugin
dependsOn gceFixture, project(':plugins:discovery-gce').bundlePlugin
}
testClusters.integTest {
numberOfNodes = gceNumberOfNodes
plugin file(project(':plugins:discovery-gce').bundlePlugin.archiveFile)
// use gce fixture for Auth calls instead of http://metadata.google.internal
environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// allows configuring hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
systemProperty 'es.allow_reroute_gce_settings', 'true'
numberOfNodes = gceNumberOfNodes
plugin file(project(':plugins:discovery-gce').bundlePlugin.archiveFile)
// use gce fixture for Auth calls instead of http://metadata.google.internal
environment 'GCE_METADATA_HOST', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// allows configuring hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
systemProperty 'es.allow_reroute_gce_settings', 'true'
setting 'discovery.seed_providers', 'gce'
// use gce fixture for metadata server calls instead of http://metadata.google.internal
setting 'cloud.gce.host', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// use gce fixture for API calls instead of https://www.googleapis.com
setting 'cloud.gce.root_url', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
setting 'discovery.seed_providers', 'gce'
// use gce fixture for metadata server calls instead of http://metadata.google.internal
setting 'cloud.gce.host', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
// use gce fixture for API calls instead of https://www.googleapis.com
setting 'cloud.gce.root_url', { "http://${gceFixture.addressAndPort}" }, IGNORE_VALUE
}

@@ -9,20 +9,20 @@ gradle.projectsEvaluated {
}
configure(project('painless-whitelist')) {
configurations.all {
resolutionStrategy.dependencySubstitution {
substitute module('org.elasticsearch.plugin:elasticsearch-scripting-painless-spi') with project(':modules:lang-painless:spi')
substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
}
configurations.all {
resolutionStrategy.dependencySubstitution {
substitute module('org.elasticsearch.plugin:elasticsearch-scripting-painless-spi') with project(':modules:lang-painless:spi')
substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
}
}
}
configure(project('security-authorization-engine')) {
configurations.all {
resolutionStrategy.dependencySubstitution {
substitute module('org.elasticsearch.plugin:x-pack-core') with project(':x-pack:plugin:core')
substitute module('org.elasticsearch.client:x-pack-transport') with project(':x-pack:transport-client')
substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
}
configurations.all {
resolutionStrategy.dependencySubstitution {
substitute module('org.elasticsearch.plugin:x-pack-core') with project(':x-pack:plugin:core')
substitute module('org.elasticsearch.client:x-pack-transport') with project(':x-pack:transport-client')
substitute module('org.elasticsearch.test:logger-usage') with project(':test:logger-usage')
}
}
}
}
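`dependencySubstitution` rewires resolution so these example builds compile against the in-repo projects instead of published artifacts; any configuration that asks for the module coordinates transparently receives the project build. The mechanism with a hypothetical module:

configurations.all {
  resolutionStrategy.dependencySubstitution {
    // requests for the published jar are answered by the local project
    substitute module('com.example:some-lib') with project(':some-lib')
  }
}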

@@ -20,15 +20,15 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin'
esplugin {
name 'custom-suggester'
description 'An example plugin showing how to write and register a custom suggester'
classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
name 'custom-suggester'
description 'An example plugin showing how to write and register a custom suggester'
classname 'org.elasticsearch.example.customsuggester.CustomSuggesterPlugin'
licenseFile rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
noticeFile rootProject.file('NOTICE.txt')
}
testClusters.integTest {
numberOfNodes = 2
numberOfNodes = 2
}
// this plugin has no unit tests, only rest tests

@@ -34,7 +34,7 @@ test.enabled = false
task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
dependsOn testClasses
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST'
}
@@ -42,7 +42,7 @@ task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
integTest {
dependsOn exampleFixture
runner {
nonInputProperties.systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }"
nonInputProperties.systemProperty 'external.address', "${-> exampleFixture.addressAndPort}"
}
}
@@ -50,4 +50,4 @@ testingConventions.naming {
IT {
baseClass 'org.elasticsearch.test.ESTestCase'
}
}
}

@@ -84,12 +84,12 @@ forbiddenPatterns {
exclude '**/*.vsdx'
}
thirdPartyAudit{
ignoreMissingClasses()
thirdPartyAudit {
ignoreMissingClasses()
}
thirdPartyAudit.onlyIf {
// FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
// rather than provide a long list of exclusions, disable the check on FIPS.
BuildParams.inFipsJvm == false
}
}

@@ -41,26 +41,26 @@ dependencyLicenses {
}
thirdPartyAudit {
ignoreMissingClasses (
// Optional and not enabled by Elasticsearch
'org.slf4j.Logger',
'org.slf4j.LoggerFactory'
)
ignoreMissingClasses(
// Optional and not enabled by Elasticsearch
'org.slf4j.Logger',
'org.slf4j.LoggerFactory'
)
ignoreViolations (
// uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1'
)
ignoreViolations(
// uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1'
)
}
check {
@@ -69,10 +69,10 @@ check {
}
testClusters {
integTest {
keystore 'azure.client.integration_test.account', 'azure_account'
keystore 'azure.client.integration_test.key', 'azure_key'
}
integTest {
keystore 'azure.client.integration_test.account', 'azure_account'
keystore 'azure.client.integration_test.key', 'azure_key'
}
}
String azureAccount = System.getenv("azure_storage_account")

@@ -40,49 +40,49 @@ String azureBasePath = System.getenv("azure_storage_base_path")
String azureSasToken = System.getenv("azure_storage_sas_token")
if (!azureAccount && !azureKey && !azureContainer && !azureBasePath && !azureSasToken) {
azureAccount = 'azure_integration_test_account'
azureKey = 'YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk=' // The key is "azure_integration_test_key" encoded using base64
azureContainer = 'container'
azureBasePath = ''
azureSasToken = ''
useFixture = true
azureAccount = 'azure_integration_test_account'
azureKey = 'YXp1cmVfaW50ZWdyYXRpb25fdGVzdF9rZXk=' // The key is "azure_integration_test_key" encoded using base64
azureContainer = 'container'
azureBasePath = ''
azureSasToken = ''
useFixture = true
}
Map<String, Object> expansions = [
'container': azureContainer,
'base_path': azureBasePath + "_integration_tests"
'container': azureContainer,
'base_path': azureBasePath + "_integration_tests"
]
processTestResources {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
}
integTest {
dependsOn project(':plugins:repository-azure').bundlePlugin
}
dependsOn project(':plugins:repository-azure').bundlePlugin
}
testClusters.integTest {
plugin file(project(':plugins:repository-azure').bundlePlugin.archiveFile)
keystore 'azure.client.integration_test.account', azureAccount
if (azureKey != null && azureKey.isEmpty() == false) {
keystore 'azure.client.integration_test.key', azureKey
}
if (azureSasToken != null && azureSasToken.isEmpty() == false) {
keystore 'azure.client.integration_test.sas_token', azureSasToken
}
plugin file(project(':plugins:repository-azure').bundlePlugin.archiveFile)
keystore 'azure.client.integration_test.account', azureAccount
if (azureKey != null && azureKey.isEmpty() == false) {
keystore 'azure.client.integration_test.key', azureKey
}
if (azureSasToken != null && azureSasToken.isEmpty() == false) {
keystore 'azure.client.integration_test.sas_token', azureSasToken
}
if (useFixture) {
def azureAddress = {
int ephemeralPort = project(':test:fixtures:azure-fixture').postProcessFixture.ext."test.fixtures.azure-fixture.tcp.8091"
assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort
}
// Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used
// in a hacky way to change the protocol and endpoint. We must fix that.
setting 'azure.client.integration_test.endpoint_suffix',
{ "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=${ -> azureAddress() }" }, IGNORE_VALUE
String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0)
setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), System.getProperty('ignore.tests.seed') == null ? DEFAULT : IGNORE_VALUE
if (useFixture) {
def azureAddress = {
int ephemeralPort = project(':test:fixtures:azure-fixture').postProcessFixture.ext."test.fixtures.azure-fixture.tcp.8091"
assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort
}
// Use a closure on the string to delay evaluation until tests are executed. The endpoint_suffix is used
// in a hacky way to change the protocol and endpoint. We must fix that.
setting 'azure.client.integration_test.endpoint_suffix',
{ "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=${-> azureAddress()}" }, IGNORE_VALUE
String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0)
setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), System.getProperty('ignore.tests.seed') == null ? DEFAULT : IGNORE_VALUE
}
}
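The `thread_pool.repository_azure.max` line derives a deterministic, seed-dependent pool size: the first segment of the test seed is parsed as an unsigned hex long and folded into the range 1..10. Worked through with a hypothetical seed segment:

String firstPartOfSeed = 'FF'                             // hypothetical seed segment
long value = Long.parseUnsignedLong(firstPartOfSeed, 16)  // 255
int poolMax = (int) (Math.abs(value % 10) + 1)            // 255 % 10 = 5, so max = 6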

@@ -69,57 +69,57 @@ dependencyLicenses {
}
thirdPartyAudit {
ignoreViolations (
// uses internal java api: sun.misc.Unsafe
'com.google.protobuf.UnsafeUtil',
'com.google.protobuf.UnsafeUtil$1',
'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor',
'com.google.protobuf.UnsafeUtil$MemoryAccessor',
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.Striped64',
'com.google.common.hash.Striped64$1',
'com.google.common.hash.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
)
ignoreViolations(
// uses internal java api: sun.misc.Unsafe
'com.google.protobuf.UnsafeUtil',
'com.google.protobuf.UnsafeUtil$1',
'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor',
'com.google.protobuf.UnsafeUtil$MemoryAccessor',
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.Striped64',
'com.google.common.hash.Striped64$1',
'com.google.common.hash.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
)
ignoreMissingClasses (
'com.google.appengine.api.datastore.Blob',
'com.google.appengine.api.datastore.DatastoreService',
'com.google.appengine.api.datastore.DatastoreServiceFactory',
'com.google.appengine.api.datastore.Entity',
'com.google.appengine.api.datastore.Key',
'com.google.appengine.api.datastore.KeyFactory',
'com.google.appengine.api.datastore.PreparedQuery',
'com.google.appengine.api.datastore.Query',
'com.google.appengine.api.memcache.Expiration',
'com.google.appengine.api.memcache.MemcacheService',
'com.google.appengine.api.memcache.MemcacheServiceFactory',
'com.google.appengine.api.urlfetch.FetchOptions$Builder',
'com.google.appengine.api.urlfetch.FetchOptions',
'com.google.appengine.api.urlfetch.HTTPHeader',
'com.google.appengine.api.urlfetch.HTTPMethod',
'com.google.appengine.api.urlfetch.HTTPRequest',
'com.google.appengine.api.urlfetch.HTTPResponse',
'com.google.appengine.api.urlfetch.URLFetchService',
'com.google.appengine.api.urlfetch.URLFetchServiceFactory',
// commons-logging optional dependencies
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
// commons-logging provided dependencies
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener'
)
ignoreMissingClasses(
'com.google.appengine.api.datastore.Blob',
'com.google.appengine.api.datastore.DatastoreService',
'com.google.appengine.api.datastore.DatastoreServiceFactory',
'com.google.appengine.api.datastore.Entity',
'com.google.appengine.api.datastore.Key',
'com.google.appengine.api.datastore.KeyFactory',
'com.google.appengine.api.datastore.PreparedQuery',
'com.google.appengine.api.datastore.Query',
'com.google.appengine.api.memcache.Expiration',
'com.google.appengine.api.memcache.MemcacheService',
'com.google.appengine.api.memcache.MemcacheServiceFactory',
'com.google.appengine.api.urlfetch.FetchOptions$Builder',
'com.google.appengine.api.urlfetch.FetchOptions',
'com.google.appengine.api.urlfetch.HTTPHeader',
'com.google.appengine.api.urlfetch.HTTPMethod',
'com.google.appengine.api.urlfetch.HTTPRequest',
'com.google.appengine.api.urlfetch.HTTPResponse',
'com.google.appengine.api.urlfetch.URLFetchService',
'com.google.appengine.api.urlfetch.URLFetchServiceFactory',
// commons-logging optional dependencies
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
// commons-logging provided dependencies
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener'
)
}
check {

@@ -33,7 +33,7 @@ apply plugin: 'elasticsearch.test.fixtures'
// TODO think about flattening qa:google-cloud-storage project into parent
dependencies {
testCompile project(path: ':plugins:repository-gcs')
testCompile project(path: ':plugins:repository-gcs')
}
testFixtures.useFixture(':test:fixtures:gcs-fixture')
@@ -45,100 +45,100 @@ String gcsBasePath = System.getenv("google_storage_base_path")
File serviceAccountFile = null
if (!gcsServiceAccount && !gcsBucket && !gcsBasePath) {
serviceAccountFile = new File(project.buildDir, 'generated-resources/service_account_test.json')
gcsBucket = 'bucket'
gcsBasePath = 'integration_test'
useFixture = true
serviceAccountFile = new File(project.buildDir, 'generated-resources/service_account_test.json')
gcsBucket = 'bucket'
gcsBasePath = 'integration_test'
useFixture = true
} else if (!gcsServiceAccount || !gcsBucket || !gcsBasePath) {
throw new IllegalArgumentException("not all options specified to run tests against external GCS service are present")
throw new IllegalArgumentException("not all options specified to run tests against external GCS service are present")
} else {
serviceAccountFile = new File(gcsServiceAccount)
serviceAccountFile = new File(gcsServiceAccount)
}
def encodedCredentials = {
Base64.encoder.encodeToString(Files.readAllBytes(serviceAccountFile.toPath()))
Base64.encoder.encodeToString(Files.readAllBytes(serviceAccountFile.toPath()))
}
def fixtureAddress = { fixture ->
assert useFixture : 'closure should not be used without a fixture'
int ephemeralPort = project(':test:fixtures:gcs-fixture').postProcessFixture.ext."test.fixtures.${fixture}.tcp.80"
assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort
assert useFixture: 'closure should not be used without a fixture'
int ephemeralPort = project(':test:fixtures:gcs-fixture').postProcessFixture.ext."test.fixtures.${fixture}.tcp.80"
assert ephemeralPort > 0
'http://127.0.0.1:' + ephemeralPort
}
/** A service account file that points to the Google Cloud Storage service emulated by the fixture **/
task createServiceAccountFile() {
doLast {
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA")
keyPairGenerator.initialize(1024)
KeyPair keyPair = keyPairGenerator.generateKeyPair()
String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded())
doLast {
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA")
keyPairGenerator.initialize(1024)
KeyPair keyPair = keyPairGenerator.generateKeyPair()
String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded())
serviceAccountFile.parentFile.mkdirs()
serviceAccountFile.setText("{\n" +
' "type": "service_account",\n' +
' "project_id": "integration_test",\n' +
' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' +
' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' +
' "client_email": "integration_test@appspot.gserviceaccount.com",\n' +
' "client_id": "123456789101112130594"\n' +
'}', 'UTF-8')
}
serviceAccountFile.parentFile.mkdirs()
serviceAccountFile.setText("{\n" +
' "type": "service_account",\n' +
' "project_id": "integration_test",\n' +
' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' +
' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' +
' "client_email": "integration_test@appspot.gserviceaccount.com",\n' +
' "client_id": "123456789101112130594"\n' +
'}', 'UTF-8')
}
}
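The fake service-account JSON above is assembled by string concatenation; a sketch of an equivalent construction with Groovy's `JsonOutput`, shown as an alternative rather than what this build does, reusing `encodedKey` and `serviceAccountFile` from the task body:

import groovy.json.JsonOutput

String json = JsonOutput.prettyPrint(JsonOutput.toJson([
  type          : 'service_account',
  project_id    : 'integration_test',
  private_key_id: UUID.randomUUID().toString(),
  // real newlines here; JsonOutput escapes them to \n in the emitted JSON
  private_key   : "-----BEGIN PRIVATE KEY-----\n${encodedKey}\n-----END PRIVATE KEY-----\n",
  client_email  : 'integration_test@appspot.gserviceaccount.com',
  client_id     : '123456789101112130594'
]))
serviceAccountFile.setText(json, 'UTF-8')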
task thirdPartyTest (type: Test) {
if (useFixture) {
thirdPartyTest.dependsOn createServiceAccountFile
nonInputProperties.systemProperty 'test.google.endpoint', "${ -> fixtureAddress('gcs-fixture-third-party') }"
nonInputProperties.systemProperty 'test.google.tokenURI', "${ -> fixtureAddress('gcs-fixture-third-party') }/o/oauth2/token"
task thirdPartyTest(type: Test) {
if (useFixture) {
thirdPartyTest.dependsOn createServiceAccountFile
nonInputProperties.systemProperty 'test.google.endpoint', "${-> fixtureAddress('gcs-fixture-third-party')}"
nonInputProperties.systemProperty 'test.google.tokenURI', "${-> fixtureAddress('gcs-fixture-third-party')}/o/oauth2/token"
gradle.taskGraph.whenReady {
if (it.hasTask(gcsThirdPartyTests)) {
throw new IllegalStateException("Tried to run third party tests but not all of the necessary environment variables " +
"'google_storage_service_account', 'google_storage_bucket', 'google_storage_base_path' are set.")
}
}
gradle.taskGraph.whenReady {
if (it.hasTask(gcsThirdPartyTests)) {
throw new IllegalStateException("Tried to run third party tests but not all of the necessary environment variables " +
"'google_storage_service_account', 'google_storage_bucket', 'google_storage_base_path' are set.")
}
}
}
include '**/GoogleCloudStorageThirdPartyTests.class'
systemProperty 'tests.security.manager', false
systemProperty 'test.google.bucket', gcsBucket
systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed
nonInputProperties.systemProperty 'test.google.account', "${ -> encodedCredentials.call() }"
include '**/GoogleCloudStorageThirdPartyTests.class'
systemProperty 'tests.security.manager', false
systemProperty 'test.google.bucket', gcsBucket
systemProperty 'test.google.base', gcsBasePath + "_third_party_tests_" + BuildParams.testSeed
nonInputProperties.systemProperty 'test.google.account', "${-> encodedCredentials.call()}"
}
task gcsThirdPartyTests {
dependsOn check
dependsOn check
}
integTest.mustRunAfter(thirdPartyTest)
check.dependsOn thirdPartyTest
Map<String, Object> expansions = [
'bucket': gcsBucket,
'base_path': gcsBasePath + "_integration_tests"
'bucket': gcsBucket,
'base_path': gcsBasePath + "_integration_tests"
]
processTestResources {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
}
integTest {
dependsOn project(':plugins:repository-gcs').bundlePlugin
dependsOn project(':plugins:repository-gcs').bundlePlugin
}
testClusters.integTest {
plugin file(project(':plugins:repository-gcs').bundlePlugin.archiveFile)
plugin file(project(':plugins:repository-gcs').bundlePlugin.archiveFile)
keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE
keystore 'gcs.client.integration_test.credentials_file', serviceAccountFile, IGNORE_VALUE
if (useFixture) {
tasks.integTest.dependsOn createServiceAccountFile
/* Use a closure on the string to delay evaluation until tests are executed */
setting 'gcs.client.integration_test.endpoint', { "${ -> fixtureAddress('gcs-fixture') }" }, IGNORE_VALUE
setting 'gcs.client.integration_test.token_uri', { "${ -> fixtureAddress('gcs-fixture') }/o/oauth2/token" }, IGNORE_VALUE
} else {
println "Using an external service to test the repository-gcs plugin"
}
if (useFixture) {
tasks.integTest.dependsOn createServiceAccountFile
/* Use a closure on the string to delay evaluation until tests are executed */
setting 'gcs.client.integration_test.endpoint', { "${-> fixtureAddress('gcs-fixture')}" }, IGNORE_VALUE
setting 'gcs.client.integration_test.token_uri', { "${-> fixtureAddress('gcs-fixture')}/o/oauth2/token" }, IGNORE_VALUE
} else {
println "Using an external service to test the repository-gcs plugin"
}
}

@@ -70,17 +70,17 @@ dependencies {
// Set the keytab files in the classpath so that we can access them from test code without the security manager
// freaking out.
if (isEclipse == false) {
testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent)
testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
}
}
normalization {
runtimeClasspath {
// ignore generated keytab files for the purposes of build avoidance
ignore '*.keytab'
// ignore fixture ports file which is on the classpath primarily to pacify the security manager
ignore '*HdfsFixture/**'
}
runtimeClasspath {
// ignore generated keytab files for the purposes of build avoidance
ignore '*.keytab'
// ignore fixture ports file which is on the classpath primarily to pacify the security manager
ignore '*HdfsFixture/**'
}
}
dependencyLicenses {
@@ -95,7 +95,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
maxWaitInSeconds 60
onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false }
waitCondition = { fixture, ant ->
@@ -127,7 +127,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
miniHDFSArgs.add(
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
)
}
@@ -159,15 +159,15 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
if (integTestTaskName.contains("Ha")) {
if (integTestTaskName.contains("Secure")) {
Path path = buildDir.toPath()
.resolve("fixtures")
.resolve("secureHaHdfsFixture")
.resolve("ports")
.resolve("fixtures")
.resolve("secureHaHdfsFixture")
.resolve("ports")
nonInputProperties.systemProperty "test.hdfs-fixture.ports", path
} else {
Path path = buildDir.toPath()
.resolve("fixtures")
.resolve("haHdfsFixture")
.resolve("ports")
.resolve("fixtures")
.resolve("haHdfsFixture")
.resolve("ports")
nonInputProperties.systemProperty "test.hdfs-fixture.ports", path
}
classpath += files("$buildDir/fixtures")
@@ -175,13 +175,13 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
if (integTestTaskName.contains("Secure")) {
if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
nonInputProperties.systemProperty (
"test.krb5.keytab.hdfs",
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab")
)
nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
nonInputProperties.systemProperty(
"test.krb5.keytab.hdfs",
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
)
}
}
}
@@ -192,8 +192,8 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
if (integTestTaskName.contains("Secure")) {
systemProperty "java.security.krb5.conf", krb5conf
extraConfigFile(
"repository-hdfs/krb5.keytab",
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
"repository-hdfs/krb5.keytab",
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
)
}
}
@@ -207,9 +207,9 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
if (nativePath != null) {
Path path = Paths.get(nativePath);
if (Files.isDirectory(path) &&
Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
fixtureSupported = true
} else {
throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");
@@ -279,27 +279,27 @@ integTestSecureHa.runner {
}
thirdPartyAudit {
ignoreMissingClasses()
ignoreViolations (
// internal java api: sun.net.dns.ResolverConfiguration
// internal java api: sun.net.util.IPAddressUtil
'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',
ignoreMissingClasses()
ignoreViolations(
// internal java api: sun.net.dns.ResolverConfiguration
// internal java api: sun.net.util.IPAddressUtil
'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',
// internal java api: sun.misc.Unsafe
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
'org.apache.hadoop.io.nativeio.NativeIO',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
// internal java api: sun.misc.Unsafe
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
'org.apache.hadoop.io.nativeio.NativeIO',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
// internal java api: sun.nio.ch.DirectBuffer
// internal java api: sun.misc.Cleaner
'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
'org.apache.hadoop.crypto.CryptoStreamUtils',
// internal java api: sun.misc.SignalHandler
'org.apache.hadoop.util.SignalLogger$Handler',
)
// internal java api: sun.nio.ch.DirectBuffer
// internal java api: sun.misc.Cleaner
'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
'org.apache.hadoop.crypto.CryptoStreamUtils',
// internal java api: sun.misc.SignalHandler
'org.apache.hadoop.util.SignalLogger$Handler',
)
}
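The normalization block near the top of this file is what keeps the HDFS test tasks cacheable: paths matched by ignore() are excluded from the runtime classpath fingerprint, so regenerated keytabs and fixture ports files do not invalidate up-to-date checks. A minimal sketch of the same Gradle API, with a hypothetical pattern:

normalization {
  runtimeClasspath {
    ignore '*.generated'   // volatile generated files no longer affect task fingerprints
  }
}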

@@ -167,7 +167,7 @@ if (useFixture) {
thirdPartyTest {
dependsOn tasks.bundlePlugin
nonInputProperties.systemProperty 'test.s3.endpoint', "${ -> minioAddress.call() }"
nonInputProperties.systemProperty 'test.s3.endpoint', "${-> minioAddress.call()}"
}
task integTestMinio(type: RestIntegTestTask) {
@@ -176,9 +176,9 @@ if (useFixture) {
runner {
// Minio only supports a single access key, see https://github.com/minio/minio/pull/5968
systemProperty 'tests.rest.blacklist', [
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*'
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*'
].join(",")
}
}
@@ -197,11 +197,11 @@ if (useFixture) {
} else {
integTest.runner {
systemProperty 'tests.rest.blacklist',
[
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*'
].join(",")
[
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*'
].join(",")
}
}
@@ -213,15 +213,15 @@ File s3FixtureFile = new File(parentFixtures, 's3Fixture.properties')
task s3FixtureProperties {
outputs.file(s3FixtureFile)
def s3FixtureOptions = [
"tests.seed" : BuildParams.testSeed,
"s3Fixture.permanent_bucket_name" : s3PermanentBucket,
"s3Fixture.permanent_key" : s3PermanentAccessKey,
"s3Fixture.temporary_bucket_name" : s3TemporaryBucket,
"s3Fixture.temporary_key" : s3TemporaryAccessKey,
"s3Fixture.temporary_session_token": s3TemporarySessionToken,
"s3Fixture.ec2_bucket_name" : s3EC2Bucket,
"s3Fixture.ecs_bucket_name" : s3ECSBucket,
"s3Fixture.disableChunkedEncoding" : s3DisableChunkedEncoding
"tests.seed": BuildParams.testSeed,
"s3Fixture.permanent_bucket_name": s3PermanentBucket,
"s3Fixture.permanent_key": s3PermanentAccessKey,
"s3Fixture.temporary_bucket_name": s3TemporaryBucket,
"s3Fixture.temporary_key": s3TemporaryAccessKey,
"s3Fixture.temporary_session_token": s3TemporarySessionToken,
"s3Fixture.ec2_bucket_name": s3EC2Bucket,
"s3Fixture.ecs_bucket_name": s3ECSBucket,
"s3Fixture.disableChunkedEncoding": s3DisableChunkedEncoding
]
doLast {
@@ -235,22 +235,22 @@ task s3Fixture(type: AntFixture) {
dependsOn s3FixtureProperties
inputs.file(s3FixtureFile)
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
env 'CLASSPATH', "${-> project.sourceSets.test.runtimeClasspath.asPath}"
executable = "${BuildParams.runtimeJavaHome}/bin/java"
args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3FixtureFile.getAbsolutePath()
}
processTestResources {
Map<String, Object> expansions = [
'permanent_bucket': s3PermanentBucket,
'permanent_base_path': s3PermanentBasePath + "_integration_tests",
'temporary_bucket': s3TemporaryBucket,
'temporary_base_path': s3TemporaryBasePath + "_integration_tests",
'ec2_bucket': s3EC2Bucket,
'ec2_base_path': s3EC2BasePath,
'ecs_bucket': s3ECSBucket,
'ecs_base_path': s3ECSBasePath,
'disable_chunked_encoding': s3DisableChunkedEncoding,
'permanent_bucket': s3PermanentBucket,
'permanent_base_path': s3PermanentBasePath + "_integration_tests",
'temporary_bucket': s3TemporaryBucket,
'temporary_base_path': s3TemporaryBasePath + "_integration_tests",
'ec2_bucket': s3EC2Bucket,
'ec2_base_path': s3EC2BasePath,
'ecs_bucket': s3ECSBucket,
'ecs_base_path': s3ECSBasePath,
'disable_chunked_encoding': s3DisableChunkedEncoding,
]
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
@@ -290,10 +290,10 @@ if (useFixture) {
dependsOn(project.s3Fixture)
runner {
systemProperty 'tests.rest.blacklist', [
'repository_s3/10_basic/*',
'repository_s3/20_repository_permanent_credentials/*',
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*'
'repository_s3/10_basic/*',
'repository_s3/20_repository_permanent_credentials/*',
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*'
].join(",")
}
}
@@ -313,12 +313,12 @@ if (useFixture) {
}
}
thirdPartyAudit.ignoreMissingClasses (
thirdPartyAudit.ignoreMissingClasses(
// classes are missing
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener',
'org.apache.avalon.framework.logger.Logger',
'org.apache.log.Hierarchy',
'org.apache.log.Logger',
'software.amazon.ion.IonReader',
'software.amazon.ion.IonSystem',
@@ -344,107 +344,107 @@ thirdPartyAudit.ignoreMissingClasses (
rootProject.globalInfo.ready {
if (BuildParams.runtimeJavaVersion <= JavaVersion.VERSION_1_8) {
thirdPartyAudit.ignoreJarHellWithJDK(
'javax.xml.bind.Binder',
'javax.xml.bind.ContextFinder$1',
'javax.xml.bind.ContextFinder',
'javax.xml.bind.DataBindingException',
'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter',
'javax.xml.bind.DatatypeConverterImpl',
'javax.xml.bind.DatatypeConverterInterface',
'javax.xml.bind.Element',
'javax.xml.bind.GetPropertyAction',
'javax.xml.bind.JAXB$Cache',
'javax.xml.bind.JAXB',
'javax.xml.bind.JAXBContext',
'javax.xml.bind.JAXBElement$GlobalScope',
'javax.xml.bind.JAXBElement',
'javax.xml.bind.JAXBException',
'javax.xml.bind.JAXBIntrospector',
'javax.xml.bind.JAXBPermission',
'javax.xml.bind.MarshalException',
'javax.xml.bind.Marshaller$Listener',
'javax.xml.bind.Marshaller',
'javax.xml.bind.Messages',
'javax.xml.bind.NotIdentifiableEvent',
'javax.xml.bind.ParseConversionEvent',
'javax.xml.bind.PrintConversionEvent',
'javax.xml.bind.PropertyException',
'javax.xml.bind.SchemaOutputResolver',
'javax.xml.bind.TypeConstraintException',
'javax.xml.bind.UnmarshalException',
'javax.xml.bind.Unmarshaller$Listener',
'javax.xml.bind.Unmarshaller',
'javax.xml.bind.UnmarshallerHandler',
'javax.xml.bind.ValidationEvent',
'javax.xml.bind.ValidationEventHandler',
'javax.xml.bind.ValidationEventLocator',
'javax.xml.bind.ValidationException',
'javax.xml.bind.Validator',
'javax.xml.bind.WhiteSpaceProcessor',
'javax.xml.bind.annotation.DomHandler',
'javax.xml.bind.annotation.W3CDomHandler',
'javax.xml.bind.annotation.XmlAccessOrder',
'javax.xml.bind.annotation.XmlAccessType',
'javax.xml.bind.annotation.XmlAccessorOrder',
'javax.xml.bind.annotation.XmlAccessorType',
'javax.xml.bind.annotation.XmlAnyAttribute',
'javax.xml.bind.annotation.XmlAnyElement',
'javax.xml.bind.annotation.XmlAttachmentRef',
'javax.xml.bind.annotation.XmlAttribute',
'javax.xml.bind.annotation.XmlElement$DEFAULT',
'javax.xml.bind.annotation.XmlElement',
'javax.xml.bind.annotation.XmlElementDecl$GLOBAL',
'javax.xml.bind.annotation.XmlElementDecl',
'javax.xml.bind.annotation.XmlElementRef$DEFAULT',
'javax.xml.bind.annotation.XmlElementRef',
'javax.xml.bind.annotation.XmlElementRefs',
'javax.xml.bind.annotation.XmlElementWrapper',
'javax.xml.bind.annotation.XmlElements',
'javax.xml.bind.annotation.XmlEnum',
'javax.xml.bind.annotation.XmlEnumValue',
'javax.xml.bind.annotation.XmlID',
'javax.xml.bind.annotation.XmlIDREF',
'javax.xml.bind.annotation.XmlInlineBinaryData',
'javax.xml.bind.annotation.XmlList',
'javax.xml.bind.annotation.XmlMimeType',
'javax.xml.bind.annotation.XmlMixed',
'javax.xml.bind.annotation.XmlNs',
'javax.xml.bind.annotation.XmlNsForm',
'javax.xml.bind.annotation.XmlRegistry',
'javax.xml.bind.annotation.XmlRootElement',
'javax.xml.bind.annotation.XmlSchema',
'javax.xml.bind.annotation.XmlSchemaType$DEFAULT',
'javax.xml.bind.annotation.XmlSchemaType',
'javax.xml.bind.annotation.XmlSchemaTypes',
'javax.xml.bind.annotation.XmlSeeAlso',
'javax.xml.bind.annotation.XmlTransient',
'javax.xml.bind.annotation.XmlType$DEFAULT',
'javax.xml.bind.annotation.XmlType',
'javax.xml.bind.annotation.XmlValue',
'javax.xml.bind.annotation.adapters.CollapsedStringAdapter',
'javax.xml.bind.annotation.adapters.HexBinaryAdapter',
'javax.xml.bind.annotation.adapters.NormalizedStringAdapter',
'javax.xml.bind.annotation.adapters.XmlAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters',
'javax.xml.bind.attachment.AttachmentMarshaller',
'javax.xml.bind.attachment.AttachmentUnmarshaller',
'javax.xml.bind.helpers.AbstractMarshallerImpl',
'javax.xml.bind.helpers.AbstractUnmarshallerImpl',
'javax.xml.bind.helpers.DefaultValidationEventHandler',
'javax.xml.bind.helpers.Messages',
'javax.xml.bind.helpers.NotIdentifiableEventImpl',
'javax.xml.bind.helpers.ParseConversionEventImpl',
'javax.xml.bind.helpers.PrintConversionEventImpl',
'javax.xml.bind.helpers.ValidationEventImpl',
'javax.xml.bind.helpers.ValidationEventLocatorImpl',
'javax.xml.bind.util.JAXBResult',
'javax.xml.bind.util.JAXBSource$1',
'javax.xml.bind.util.JAXBSource',
'javax.xml.bind.util.Messages',
'javax.xml.bind.util.ValidationEventCollector'
'javax.xml.bind.Binder',
'javax.xml.bind.ContextFinder$1',
'javax.xml.bind.ContextFinder',
'javax.xml.bind.DataBindingException',
'javax.xml.bind.DatatypeConverter',
'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter',
'javax.xml.bind.DatatypeConverterImpl',
'javax.xml.bind.DatatypeConverterInterface',
'javax.xml.bind.Element',
'javax.xml.bind.GetPropertyAction',
'javax.xml.bind.JAXB$Cache',
'javax.xml.bind.JAXB',
'javax.xml.bind.JAXBContext',
'javax.xml.bind.JAXBElement$GlobalScope',
'javax.xml.bind.JAXBElement',
'javax.xml.bind.JAXBException',
'javax.xml.bind.JAXBIntrospector',
'javax.xml.bind.JAXBPermission',
'javax.xml.bind.MarshalException',
'javax.xml.bind.Marshaller$Listener',
'javax.xml.bind.Marshaller',
'javax.xml.bind.Messages',
'javax.xml.bind.NotIdentifiableEvent',
'javax.xml.bind.ParseConversionEvent',
'javax.xml.bind.PrintConversionEvent',
'javax.xml.bind.PropertyException',
'javax.xml.bind.SchemaOutputResolver',
'javax.xml.bind.TypeConstraintException',
'javax.xml.bind.UnmarshalException',
'javax.xml.bind.Unmarshaller$Listener',
'javax.xml.bind.Unmarshaller',
'javax.xml.bind.UnmarshallerHandler',
'javax.xml.bind.ValidationEvent',
'javax.xml.bind.ValidationEventHandler',
'javax.xml.bind.ValidationEventLocator',
'javax.xml.bind.ValidationException',
'javax.xml.bind.Validator',
'javax.xml.bind.WhiteSpaceProcessor',
'javax.xml.bind.annotation.DomHandler',
'javax.xml.bind.annotation.W3CDomHandler',
'javax.xml.bind.annotation.XmlAccessOrder',
'javax.xml.bind.annotation.XmlAccessType',
'javax.xml.bind.annotation.XmlAccessorOrder',
'javax.xml.bind.annotation.XmlAccessorType',
'javax.xml.bind.annotation.XmlAnyAttribute',
'javax.xml.bind.annotation.XmlAnyElement',
'javax.xml.bind.annotation.XmlAttachmentRef',
'javax.xml.bind.annotation.XmlAttribute',
'javax.xml.bind.annotation.XmlElement$DEFAULT',
'javax.xml.bind.annotation.XmlElement',
'javax.xml.bind.annotation.XmlElementDecl$GLOBAL',
'javax.xml.bind.annotation.XmlElementDecl',
'javax.xml.bind.annotation.XmlElementRef$DEFAULT',
'javax.xml.bind.annotation.XmlElementRef',
'javax.xml.bind.annotation.XmlElementRefs',
'javax.xml.bind.annotation.XmlElementWrapper',
'javax.xml.bind.annotation.XmlElements',
'javax.xml.bind.annotation.XmlEnum',
'javax.xml.bind.annotation.XmlEnumValue',
'javax.xml.bind.annotation.XmlID',
'javax.xml.bind.annotation.XmlIDREF',
'javax.xml.bind.annotation.XmlInlineBinaryData',
'javax.xml.bind.annotation.XmlList',
'javax.xml.bind.annotation.XmlMimeType',
'javax.xml.bind.annotation.XmlMixed',
'javax.xml.bind.annotation.XmlNs',
'javax.xml.bind.annotation.XmlNsForm',
'javax.xml.bind.annotation.XmlRegistry',
'javax.xml.bind.annotation.XmlRootElement',
'javax.xml.bind.annotation.XmlSchema',
'javax.xml.bind.annotation.XmlSchemaType$DEFAULT',
'javax.xml.bind.annotation.XmlSchemaType',
'javax.xml.bind.annotation.XmlSchemaTypes',
'javax.xml.bind.annotation.XmlSeeAlso',
'javax.xml.bind.annotation.XmlTransient',
'javax.xml.bind.annotation.XmlType$DEFAULT',
'javax.xml.bind.annotation.XmlType',
'javax.xml.bind.annotation.XmlValue',
'javax.xml.bind.annotation.adapters.CollapsedStringAdapter',
'javax.xml.bind.annotation.adapters.HexBinaryAdapter',
'javax.xml.bind.annotation.adapters.NormalizedStringAdapter',
'javax.xml.bind.annotation.adapters.XmlAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter',
'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters',
'javax.xml.bind.attachment.AttachmentMarshaller',
'javax.xml.bind.attachment.AttachmentUnmarshaller',
'javax.xml.bind.helpers.AbstractMarshallerImpl',
'javax.xml.bind.helpers.AbstractUnmarshallerImpl',
'javax.xml.bind.helpers.DefaultValidationEventHandler',
'javax.xml.bind.helpers.Messages',
'javax.xml.bind.helpers.NotIdentifiableEventImpl',
'javax.xml.bind.helpers.ParseConversionEventImpl',
'javax.xml.bind.helpers.PrintConversionEventImpl',
'javax.xml.bind.helpers.ValidationEventImpl',
'javax.xml.bind.helpers.ValidationEventLocatorImpl',
'javax.xml.bind.util.JAXBResult',
'javax.xml.bind.util.JAXBSource$1',
'javax.xml.bind.util.JAXBSource',
'javax.xml.bind.util.Messages',
'javax.xml.bind.util.ValidationEventCollector'
)
} else {
thirdPartyAudit.ignoreMissingClasses 'javax.activation.DataHandler'
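The gate above hinges on where JAXB lives: up to Java 8 the javax.xml.bind classes ship inside the JDK, so the audit has to tolerate jar hell with the bundled copies, while on newer runtimes they are absent and only missing-class exclusions remain. A small sketch of the same comparison using Gradle's JavaVersion enum (println bodies are illustrative only):

if (JavaVersion.current() <= JavaVersion.VERSION_1_8) {
  println 'JAXB bundled with the JDK: expect jar hell with javax.xml.bind.*'
} else {
  println 'JAXB removed from the JDK: javax.xml.bind.* must come from the classpath'
}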

@@ -21,146 +21,146 @@ import org.elasticsearch.gradle.info.BuildParams
apply plugin: "nebula.maven-scm"
esplugin {
description 'The nio transport.'
classname 'org.elasticsearch.transport.nio.NioTransportPlugin'
hasClientJar = true
description 'The nio transport.'
classname 'org.elasticsearch.transport.nio.NioTransportPlugin'
hasClientJar = true
}
dependencies {
compile project(':libs:elasticsearch-nio')
compile project(':libs:elasticsearch-nio')
// network stack
compile "io.netty:netty-buffer:${versions.netty}"
compile "io.netty:netty-codec:${versions.netty}"
compile "io.netty:netty-codec-http:${versions.netty}"
compile "io.netty:netty-common:${versions.netty}"
compile "io.netty:netty-handler:${versions.netty}"
compile "io.netty:netty-resolver:${versions.netty}"
compile "io.netty:netty-transport:${versions.netty}"
// network stack
compile "io.netty:netty-buffer:${versions.netty}"
compile "io.netty:netty-codec:${versions.netty}"
compile "io.netty:netty-codec-http:${versions.netty}"
compile "io.netty:netty-common:${versions.netty}"
compile "io.netty:netty-handler:${versions.netty}"
compile "io.netty:netty-resolver:${versions.netty}"
compile "io.netty:netty-transport:${versions.netty}"
}
dependencyLicenses {
mapping from: /netty-.*/, to: 'netty'
mapping from: /netty-.*/, to: 'netty'
}
thirdPartyAudit {
ignoreMissingClasses (
// from io.netty.handler.codec.protobuf.ProtobufDecoder (netty)
'com.google.protobuf.ExtensionRegistry',
'com.google.protobuf.MessageLite$Builder',
'com.google.protobuf.MessageLite',
'com.google.protobuf.Parser',
ignoreMissingClasses(
// from io.netty.handler.codec.protobuf.ProtobufDecoder (netty)
'com.google.protobuf.ExtensionRegistry',
'com.google.protobuf.MessageLite$Builder',
'com.google.protobuf.MessageLite',
'com.google.protobuf.Parser',
// from io.netty.logging.CommonsLoggerFactory (netty)
'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory',
// from io.netty.logging.CommonsLoggerFactory (netty)
'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory',
// from Log4j (deliberate, Netty will fallback to Log4j 2)
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
// from Log4j (deliberate, Netty will fallback to Log4j 2)
'org.apache.log4j.Level',
'org.apache.log4j.Logger',
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
// from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty)
'org.bouncycastle.cert.X509v3CertificateBuilder',
'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter',
'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder',
'org.bouncycastle.jce.provider.BouncyCastleProvider',
'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
'org.eclipse.jetty.npn.NextProtoNego',
// from io.netty.handler.ssl.JettyNpnSslEngine (netty)
'org.eclipse.jetty.npn.NextProtoNego$ClientProvider',
'org.eclipse.jetty.npn.NextProtoNego$ServerProvider',
'org.eclipse.jetty.npn.NextProtoNego',
// from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty)
'org.jboss.marshalling.ByteInput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteInput (netty)
'org.jboss.marshalling.ByteInput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty)
'org.jboss.marshalling.ByteOutput',
// from io.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty)
'org.jboss.marshalling.ByteOutput',
// from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty)
'org.jboss.marshalling.Marshaller',
// from io.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty)
'org.jboss.marshalling.Marshaller',
// from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty)
'org.jboss.marshalling.MarshallerFactory',
'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller',
// from io.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty)
'org.jboss.marshalling.MarshallerFactory',
'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller',
// from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.helpers.FormattingTuple',
'org.slf4j.helpers.MessageFormatter',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.slf4j.spi.LocationAwareLogger',
// from io.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.helpers.FormattingTuple',
'org.slf4j.helpers.MessageFormatter',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.slf4j.spi.LocationAwareLogger',
'com.google.protobuf.ExtensionRegistryLite',
'com.google.protobuf.MessageLiteOrBuilder',
'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder',
'com.ning.compress.lzf.LZFEncoder',
'com.ning.compress.lzf.util.ChunkDecoderFactory',
'com.ning.compress.lzf.util.ChunkEncoderFactory',
'lzma.sdk.lzma.Encoder',
'net.jpountz.lz4.LZ4Compressor',
'net.jpountz.lz4.LZ4Factory',
'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.XXHash32',
'net.jpountz.xxhash.XXHashFactory',
'org.eclipse.jetty.alpn.ALPN$ClientProvider',
'org.eclipse.jetty.alpn.ALPN$ServerProvider',
'org.eclipse.jetty.alpn.ALPN',
'com.google.protobuf.ExtensionRegistryLite',
'com.google.protobuf.MessageLiteOrBuilder',
'com.google.protobuf.nano.CodedOutputByteBufferNano',
'com.google.protobuf.nano.MessageNano',
'com.jcraft.jzlib.Deflater',
'com.jcraft.jzlib.Inflater',
'com.jcraft.jzlib.JZlib$WrapperType',
'com.jcraft.jzlib.JZlib',
'com.ning.compress.BufferRecycler',
'com.ning.compress.lzf.ChunkDecoder',
'com.ning.compress.lzf.ChunkEncoder',
'com.ning.compress.lzf.LZFEncoder',
'com.ning.compress.lzf.util.ChunkDecoderFactory',
'com.ning.compress.lzf.util.ChunkEncoderFactory',
'lzma.sdk.lzma.Encoder',
'net.jpountz.lz4.LZ4Compressor',
'net.jpountz.lz4.LZ4Factory',
'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.XXHash32',
'net.jpountz.xxhash.XXHashFactory',
'org.eclipse.jetty.alpn.ALPN$ClientProvider',
'org.eclipse.jetty.alpn.ALPN$ServerProvider',
'org.eclipse.jetty.alpn.ALPN',
'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener',
'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener',
// from io.netty.handler.ssl.OpenSslEngine (netty)
'io.netty.internal.tcnative.Buffer',
'io.netty.internal.tcnative.Library',
'io.netty.internal.tcnative.SSL',
'io.netty.internal.tcnative.SSLContext',
'io.netty.internal.tcnative.SSLPrivateKeyMethod',
'io.netty.internal.tcnative.CertificateCallback',
'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher',
)
// from io.netty.handler.ssl.OpenSslEngine (netty)
'io.netty.internal.tcnative.Buffer',
'io.netty.internal.tcnative.Library',
'io.netty.internal.tcnative.SSL',
'io.netty.internal.tcnative.SSLContext',
'io.netty.internal.tcnative.SSLPrivateKeyMethod',
'io.netty.internal.tcnative.CertificateCallback',
'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher',
)
ignoreViolations (
ignoreViolations(
'io.netty.util.internal.PlatformDependent0',
'io.netty.util.internal.PlatformDependent0$1',
'io.netty.util.internal.PlatformDependent0$2',
'io.netty.util.internal.PlatformDependent0$3',
'io.netty.util.internal.PlatformDependent0$5',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
'io.netty.util.internal.PlatformDependent0',
'io.netty.util.internal.PlatformDependent0$1',
'io.netty.util.internal.PlatformDependent0$2',
'io.netty.util.internal.PlatformDependent0$3',
'io.netty.util.internal.PlatformDependent0$5',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator'
)
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator'
)
}
rootProject.globalInfo.ready {
if (BuildParams.inFipsJvm == false) {
// BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
// a FIPS JVM with BouncyCastleFIPS Provider
thirdPartyAudit.ignoreMissingClasses(
'org.bouncycastle.asn1.x500.X500Name'
)
}
if (BuildParams.inFipsJvm == false) {
// BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in
// a FIPS JVM with BouncyCastleFIPS Provider
thirdPartyAudit.ignoreMissingClasses(
'org.bouncycastle.asn1.x500.X500Name'
)
}
}
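For readers new to the audit task: ignoreMissingClasses() suppresses references from third-party jars to classes that are simply absent from the classpath (optional integrations such as slf4j, Bouncy Castle or conscrypt), whereas ignoreViolations() suppresses uses of JDK-internal APIs inside the named classes. A reduced, hypothetical configuration showing the split:

thirdPartyAudit {
  ignoreMissingClasses(
    'org.slf4j.Logger'                            // optional logging backend, never shipped
  )
  ignoreViolations(
    'io.netty.util.internal.PlatformDependent0'   // reaches into sun.misc.Unsafe
  )
}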

@@ -1,4 +1,3 @@
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.testclusters.TestClustersPlugin

@@ -22,5 +22,5 @@ apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.test-with-dependencies'
dependencies {
testCompile project(":client:rest-high-level")
testCompile project(":client:rest-high-level")
}

@@ -23,19 +23,19 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin'
esplugin {
description 'Die with dignity plugin'
classname 'org.elasticsearch.DieWithDignityPlugin'
description 'Die with dignity plugin'
classname 'org.elasticsearch.DieWithDignityPlugin'
}
integTest.runner {
systemProperty 'tests.security.manager', 'false'
systemProperty 'tests.system_call_filter', 'false'
nonInputProperties.systemProperty 'log', "${-> testClusters.integTest.singleNode().getServerLog()}"
systemProperty 'runtime.java.home', BuildParams.runtimeJavaHome
systemProperty 'tests.security.manager', 'false'
systemProperty 'tests.system_call_filter', 'false'
nonInputProperties.systemProperty 'log', "${-> testClusters.integTest.singleNode().getServerLog()}"
systemProperty 'runtime.java.home', BuildParams.runtimeJavaHome
}
testClusters.integTest {
systemProperty "die.with.dignity.test", "whatever"
systemProperty "die.with.dignity.test", "whatever"
}
test.enabled = false
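The nonInputProperties helper used above is provided by the Elasticsearch build plugins: it passes a system property to the forked test JVM without registering it as a task input, so a value that changes on every run, such as a server log path, does not defeat up-to-date checks. A sketch of the contrast, reusing the properties from this file:

integTest.runner {
  systemProperty 'tests.security.manager', 'false'   // part of the task's input fingerprint
  nonInputProperties.systemProperty 'log',           // deliberately excluded from the fingerprint
    "${-> testClusters.integTest.singleNode().getServerLog()}"
}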

@@ -37,16 +37,16 @@ test {
}
thirdPartyAudit {
ignoreMissingClasses (
'com.ibm.icu.lang.UCharacter'
)
ignoreMissingClasses(
'com.ibm.icu.lang.UCharacter'
)
ignoreViolations (
// uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1'
)
ignoreViolations(
// uses internal java api: sun.misc.Unsafe
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1'
)
}

@@ -36,7 +36,7 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
testClusters {
"${baseName}" {
versions = [ bwcVersion.toString(), project.version ]
versions = [bwcVersion.toString(), project.version]
numberOfNodes = 2
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
@@ -68,15 +68,15 @@ for (Version bwcVersion : bwcVersions.indexCompatible) {
tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach {
it.systemProperty 'tests.old_cluster_version', bwcVersion.toString().minus("-SNAPSHOT")
it.systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
if (project.bwc_tests_enabled) {
bwcTest.dependsOn(
tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest")
}
tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest")
}
)
}
}
@@ -104,4 +104,4 @@ artifacts {
testArtifacts testJar
}
test.enabled = false
test.enabled = false
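The registration above follows a fan-out pattern: a lifecycle bwcTest task depends on one per-version "#bwcTest" task, which in turn depends on the final stage of that version's upgrade pipeline. Stripped to a skeleton with hypothetical version strings:

def bwcTest = tasks.register('bwcTest')
['v6.8.0', 'v7.0.0'].each { baseName ->
  def upgraded = tasks.register("${baseName}#upgradedClusterTest")
  def perVersion = tasks.register("${baseName}#bwcTest") { dependsOn upgraded }
  bwcTest.configure { dependsOn perVersion }
}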

@@ -1,40 +1,40 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.standalone-test'
testClusters.integTest {
/**
* Provide a custom log4j configuration where layout is an old style pattern and confirm that Elasticsearch
* can successfully startup.
*/
extraConfigFile 'log4j2.properties', file('custom-log4j2.properties')
testClusters.integTest {
/**
* Provide a custom log4j configuration where layout is an old style pattern and confirm that Elasticsearch
* can successfully startup.
*/
extraConfigFile 'log4j2.properties', file('custom-log4j2.properties')
}
integTest.runner {
nonInputProperties.systemProperty 'tests.logfile',
"${ -> testClusters.integTest.singleNode().getServerLog().absolutePath.replaceAll(".json", ".log")}"
nonInputProperties.systemProperty 'tests.logfile',
"${-> testClusters.integTest.singleNode().getServerLog().absolutePath.replaceAll(".json", ".log")}"
}
test {
systemProperty 'tests.security.manager', 'false'
systemProperty 'tests.security.manager', 'false'
}
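One subtlety in the tests.logfile expression above: String.replaceAll() takes a regular expression, so '.json' matches any character followed by 'json', not just a literal dot. For this log path the looseness is harmless, but it is worth knowing. A quick standalone check with hypothetical paths:

assert '/logs/integTest_server.json'.replaceAll('.json', '.log') == '/logs/integTest_server.log'
assert 'axjson'.replaceAll('.json', '.log') == 'a.log'   // the dot acted as a wildcard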

@@ -39,14 +39,14 @@ dependencies {
}
processTestResources {
from ({ zipTree(configurations.restSpec.singleFile) })
from({ zipTree(configurations.restSpec.singleFile) })
dependsOn configurations.restSpec
}
for (Version bwcVersion : bwcVersions.wireCompatible) {
if (bwcVersion == VersionProperties.getElasticsearchVersion()) {
// Not really a mixed cluster
continue ;
continue;
}
String baseName = "v${bwcVersion}"
@@ -55,7 +55,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
the nodes have a different minor. */
testClusters {
"${baseName}" {
versions = [ bwcVersion.toString(), project.version ]
versions = [bwcVersion.toString(), project.version]
numberOfNodes = 4
setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
@@ -69,16 +69,16 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
doFirst {
project.delete("${buildDir}/cluster/shared/repo/${baseName}")
// Getting the endpoints causes a wait for the cluster
println "Test cluster endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",") }"
println "Test cluster endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}"
println "Upgrading one node to create a mixed cluster"
testClusters."${baseName}".nextNodeToNextVersion()
// Getting the endpoints causes a wait for the cluster
println "Upgrade complete, endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",") }"
println "Upgrade complete, endpoints are: ${-> testClusters."${baseName}".allHttpSocketURI.join(",")}"
println "Upgrading another node to create a mixed cluster"
testClusters."${baseName}".nextNodeToNextVersion()
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
onlyIf { project.bwc_tests_enabled }

@@ -48,7 +48,7 @@ task mixedClusterTest(type: RestIntegTestTask) {
testClusters.mixedClusterTest {
setting 'cluster.remote.my_remote_cluster.seeds',
{ "\"${testClusters.'remote-cluster'.getAllTransportPortURI().get(0)}\"" }
{ "\"${testClusters.'remote-cluster'.getAllTransportPortURI().get(0)}\"" }
setting 'cluster.remote.connections_per_cluster', '1'
setting 'cluster.remote.connect', 'true'
}

@@ -51,7 +51,7 @@ testingConventions.enabled = false
tasks.dependencyLicenses.enabled = false
tasks.dependenciesInfo.enabled = false
tasks.thirdPartyAudit.ignoreMissingClasses ()
tasks.thirdPartyAudit.ignoreMissingClasses()
tasks.register('destructivePackagingTest') {
dependsOn 'destructiveDistroTest', 'destructiveBatsTest.oss', 'destructiveBatsTest.default'

@@ -40,7 +40,7 @@ dependencies {
}
processTestResources {
from ({ zipTree(configurations.restSpec.singleFile) }) {
from({ zipTree(configurations.restSpec.singleFile) }) {
include 'rest-api-spec/api/**'
}
dependsOn configurations.restSpec
@@ -64,7 +64,7 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
testClusters {
"${baseName}" {
versions = [ bwcVersion.toString(), project.version ]
versions = [bwcVersion.toString(), project.version]
numberOfNodes = 3
setting 'repositories.url.allowed_urls', 'http://snapshot.test*'
@@ -82,8 +82,8 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
project.delete("${buildDir}/cluster/shared/repo/${baseName}")
}
systemProperty 'tests.rest.suite', 'old_cluster'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#oneThirdUpgradedTest", RestTestRunnerTask) {
@@ -95,8 +95,8 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.upgrade_from_version', project.version.replace("-SNAPSHOT", "")
systemProperty 'tests.first_round', 'true'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#twoThirdsUpgradedTest", RestTestRunnerTask) {
@@ -108,8 +108,8 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
systemProperty 'tests.rest.suite', 'mixed_cluster'
systemProperty 'tests.upgrade_from_version', project.version.replace("-SNAPSHOT", "")
systemProperty 'tests.first_round', 'false'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#upgradedClusterTest", RestTestRunnerTask) {
@@ -120,15 +120,15 @@ for (Version bwcVersion : bwcVersions.wireCompatible) {
useCluster testClusters."${baseName}"
systemProperty 'tests.rest.suite', 'upgraded_cluster'
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
if (project.bwc_tests_enabled) {
bwcTest.dependsOn(
tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest")
}
tasks.register("${baseName}#bwcTest") {
dependsOn tasks.named("${baseName}#upgradedClusterTest")
}
)
}
}
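The runner tasks above form a strict pipeline: old cluster, one third upgraded, two thirds upgraded, fully upgraded, each stage re-running the REST suite against a progressively upgraded cluster and depending on its predecessor. Reduced to the dependency wiring alone (bodies omitted, names from above):

def previous = null
['oldClusterTest', 'oneThirdUpgradedTest', 'twoThirdsUpgradedTest', 'upgradedClusterTest'].each { name ->
  def stage = tasks.register(name)
  if (previous != null) {
    def prior = previous                  // capture per iteration; configure {} runs lazily
    stage.configure { dependsOn prior }
  }
  previous = stage
}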

@@ -25,29 +25,29 @@ apply plugin: 'elasticsearch.rest-test'
// TODO: this test works, but it isn't really a rest test...should we have another plugin for "non rest test that just needs N clusters?"
dependencies {
testCompile project(path: ':client:transport', configuration: 'runtime') // randomly swapped in as a transport
testCompile project(path: ':client:transport', configuration: 'runtime') // randomly swapped in as a transport
}
task singleNodeIntegTest(type: RestIntegTestTask) {
mustRunAfter(precommit)
mustRunAfter(precommit)
}
testClusters.singleNodeIntegTest {
setting 'discovery.type', 'single-node'
setting 'discovery.type', 'single-node'
}
integTest {
dependsOn singleNodeIntegTest
dependsOn singleNodeIntegTest
}
check.dependsOn(integTest)
testingConventions {
naming.clear()
naming {
IT {
baseClass 'org.elasticsearch.smoketest.ESSmokeClientTestCase'
}
naming.clear()
naming {
IT {
baseClass 'org.elasticsearch.smoketest.ESSmokeClientTestCase'
}
}
}

@ -23,14 +23,14 @@ apply plugin: 'elasticsearch.rest-test'
apply plugin: 'elasticsearch.test-with-dependencies'
dependencies {
testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') // for http
testCompile project(path: ':plugins:transport-nio', configuration: 'runtime') // for http
testCompile project(path: ':modules:transport-netty4', configuration: 'runtime') // for http
testCompile project(path: ':plugins:transport-nio', configuration: 'runtime') // for http
}
integTest.runner {
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.
*/
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
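The property above exists because Netty's runtime processor count is set-once per JVM: randomized suites sharing a JVM would otherwise race to set it and fail. A sketch against Netty's own API (behaviour as documented for io.netty.util.NettyRuntime; not part of this build script):

import io.netty.util.NettyRuntime

NettyRuntime.setAvailableProcessors(4)
assert NettyRuntime.availableProcessors() == 4
// Any further setAvailableProcessors(...) call in this JVM throws IllegalStateException.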

@@ -22,9 +22,9 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: ':modules:ingest-common', configuration: 'runtime')
testCompile project(path: ':modules:ingest-common', configuration: 'runtime')
}
testClusters.integTest {
setting 'node.ingest', 'false'
setting 'node.ingest', 'false'
}

@@ -22,17 +22,17 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: ':modules:ingest-common', configuration: 'runtime')
testCompile project(path: ':modules:ingest-geoip', configuration: 'runtime')
testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
testCompile project(path: ':modules:reindex', configuration: 'runtime')
testCompile project(path: ':modules:ingest-common', configuration: 'runtime')
testCompile project(path: ':modules:ingest-geoip', configuration: 'runtime')
testCompile project(path: ':modules:lang-mustache', configuration: 'runtime')
testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
testCompile project(path: ':modules:reindex', configuration: 'runtime')
}
testingConventions {
naming {
IT {
baseClass 'org.elasticsearch.ingest.AbstractScriptTestCase'
}
naming {
IT {
baseClass 'org.elasticsearch.ingest.AbstractScriptTestCase'
}
}
}

@@ -38,9 +38,9 @@ integTest.runner {
}
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'oss'))) {
systemProperty 'tests.rest.blacklist', [
'cat.templates/10_basic/No templates',
'cat.templates/10_basic/Sort templates',
'cat.templates/10_basic/Multiple template',
].join(',')
'cat.templates/10_basic/No templates',
'cat.templates/10_basic/Sort templates',
'cat.templates/10_basic/Multiple template',
].join(',')
}
}

@@ -29,5 +29,5 @@ testClusters.integTest {
integTest.runner {
nonInputProperties.systemProperty 'tests.logfile',
"${ -> testClusters.integTest.singleNode().getServerLog() }"
"${-> testClusters.integTest.singleNode().getServerLog()}"
}

@@ -26,38 +26,38 @@ apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.standalone-test'
tasks.register("bwcTest") {
description = 'Runs backwards compatibility tests.'
group = 'verification'
description = 'Runs backwards compatibility tests.'
group = 'verification'
}
for (Version bwcVersion : bwcVersions.indexCompatible) {
String baseName = "v${bwcVersion}"
String baseName = "v${bwcVersion}"
testClusters {
"${baseName}" {
version = bwcVersion.toString()
setting 'http.content_type.required', 'true'
javaHome = BuildParams.runtimeJavaHome
}
testClusters {
"${baseName}" {
version = bwcVersion.toString()
setting 'http.content_type.required', 'true'
javaHome = BuildParams.runtimeJavaHome
}
}
tasks.register("${baseName}#integTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}"
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",") }")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName() }")
}
tasks.register("${baseName}#integTest", RestTestRunnerTask) {
useCluster testClusters."${baseName}"
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}")
}
tasks.register("${baseName}#bwcTest") {
dependsOn "${baseName}#integTest"
}
tasks.register("${baseName}#bwcTest") {
dependsOn "${baseName}#integTest"
}
bwcTest.dependsOn("${baseName}#bwcTest")
bwcTest.dependsOn("${baseName}#bwcTest")
}
task bwcTestSnapshots {
if (project.bwc_tests_enabled) {
for (version in bwcVersions.unreleasedIndexCompatible) {
dependsOn "v${version}#bwcTest"
dependsOn "v${version}#bwcTest"
}
}
}
@@ -86,4 +86,4 @@ task verifyDocsLuceneVersion {
check.dependsOn bwcTestSnapshots, verifyDocsLuceneVersion
test.enabled = false
test.enabled = false

@@ -37,184 +37,184 @@ final String wildflyInstall = "${buildDir}/wildfly/wildfly-${wildflyVersion}"
int managementPort
repositories {
// the Wildfly distribution is not available via a repository, so we fake an Ivy repository on top of the download site
ivy {
name "wildfly"
url "https://download.jboss.org"
metadataSources {
artifact()
}
patternLayout {
artifact 'wildfly/[revision]/[module]-[revision].[ext]'
}
// the Wildfly distribution is not available via a repository, so we fake an Ivy repository on top of the download site
ivy {
name "wildfly"
url "https://download.jboss.org"
metadataSources {
artifact()
}
patternLayout {
artifact 'wildfly/[revision]/[module]-[revision].[ext]'
}
}
}
configurations {
wildfly
wildfly
}
dependencies {
providedCompile 'javax.enterprise:cdi-api:1.2'
providedCompile 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.0.Final'
providedCompile 'org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.0_spec:1.0.0.Final'
compile ('org.jboss.resteasy:resteasy-jackson2-provider:3.0.19.Final') {
exclude module: 'jackson-annotations'
exclude module: 'jackson-core'
exclude module: 'jackson-databind'
exclude module: 'jackson-jaxrs-json-provider'
}
compile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}"
compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:${versions.jackson}"
compile "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}"
compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
compile "org.apache.logging.log4j:log4j-core:${versions.log4j}"
compile project(path: ':client:transport', configuration: 'runtime')
wildfly "org.jboss:wildfly:${wildflyVersion}@zip"
testCompile project(':test:framework')
providedCompile 'javax.enterprise:cdi-api:1.2'
providedCompile 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.0.Final'
providedCompile 'org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.0_spec:1.0.0.Final'
compile('org.jboss.resteasy:resteasy-jackson2-provider:3.0.19.Final') {
exclude module: 'jackson-annotations'
exclude module: 'jackson-core'
exclude module: 'jackson-databind'
exclude module: 'jackson-jaxrs-json-provider'
}
compile "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}"
compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:${versions.jackson}"
compile "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}"
compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
compile "org.apache.logging.log4j:log4j-core:${versions.log4j}"
compile project(path: ':client:transport', configuration: 'runtime')
wildfly "org.jboss:wildfly:${wildflyVersion}@zip"
testCompile project(':test:framework')
}
task unzipWildfly(type: Sync) {
into wildflyDir
from { zipTree(configurations.wildfly.singleFile) }
into wildflyDir
from { zipTree(configurations.wildfly.singleFile) }
}
task deploy(type: Copy) {
dependsOn unzipWildfly, war
from war
into "${wildflyInstall}/standalone/deployments"
dependsOn unzipWildfly, war
from war
into "${wildflyInstall}/standalone/deployments"
}
task writeElasticsearchProperties(type: DefaultTestClustersTask) {
onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) }
useCluster testClusters.integTest
dependsOn deploy
doLast {
final File elasticsearchProperties = file("${wildflyInstall}/standalone/configuration/elasticsearch.properties")
elasticsearchProperties.write(
[
"transport.uri=${-> testClusters.integTest.getAllTransportPortURI().get(0)}",
"cluster.name=integTest"
].join("\n"))
}
onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) }
useCluster testClusters.integTest
dependsOn deploy
doLast {
final File elasticsearchProperties = file("${wildflyInstall}/standalone/configuration/elasticsearch.properties")
elasticsearchProperties.write(
[
"transport.uri=${-> testClusters.integTest.getAllTransportPortURI().get(0)}",
"cluster.name=integTest"
].join("\n"))
}
}
// the default configuration ships with IPv6 disabled but our cluster could be bound to IPv6 if the host supports it
task enableIPv6 {
dependsOn unzipWildfly
doLast {
final File standaloneConf = file("${wildflyInstall}/bin/standalone.conf")
final List<String> lines =
Files.readAllLines(standaloneConf.toPath())
.collect { line -> line.replace("-Djava.net.preferIPv4Stack=true", "-Djava.net.preferIPv4Stack=false") }
standaloneConf.write(lines.join("\n"))
}
dependsOn unzipWildfly
doLast {
final File standaloneConf = file("${wildflyInstall}/bin/standalone.conf")
final List<String> lines =
Files.readAllLines(standaloneConf.toPath())
.collect { line -> line.replace("-Djava.net.preferIPv4Stack=true", "-Djava.net.preferIPv4Stack=false") }
standaloneConf.write(lines.join("\n"))
}
}
task startWildfly {
dependsOn enableIPv6, writeElasticsearchProperties
doLast {
// we skip these tests on Windows so we do not need to worry about compatibility here
final ProcessBuilder wildfly = new ProcessBuilder(
"${wildflyInstall}/bin/standalone.sh",
"-Djboss.http.port=0",
"-Djboss.https.port=0",
"-Djboss.management.http.port=0")
final Process process = wildfly.start()
new BufferedReader(new InputStreamReader(process.getInputStream())).withReader { br ->
String line
int httpPort = 0
while ((line = br.readLine()) != null) {
logger.info(line)
if (line.matches('.*Undertow HTTP listener default listening on .*:\\d+$')) {
assert httpPort == 0
final int index = line.lastIndexOf(":")
assert index >= 0
httpPort = Integer.parseInt(line.substring(index + 1))
// set this system property so the test runner knows the port Wildfly is listening for HTTP requests on
integTestRunner.systemProperty("tests.jboss.root", "http://localhost:$httpPort/wildfly-$version/transport")
} else if (line.matches('.*Http management interface listening on http://.*:\\d+/management$')) {
assert managementPort == 0
final int colonIndex = line.lastIndexOf(":")
assert colonIndex >= 0
final int slashIndex = line.lastIndexOf("/")
assert slashIndex >= 0
managementPort = Integer.parseInt(line.substring(colonIndex + 1, slashIndex))
dependsOn enableIPv6, writeElasticsearchProperties
doLast {
// we skip these tests on Windows so we do not need to worry about compatibility here
final ProcessBuilder wildfly = new ProcessBuilder(
"${wildflyInstall}/bin/standalone.sh",
"-Djboss.http.port=0",
"-Djboss.https.port=0",
"-Djboss.management.http.port=0")
final Process process = wildfly.start()
new BufferedReader(new InputStreamReader(process.getInputStream())).withReader { br ->
String line
int httpPort = 0
while ((line = br.readLine()) != null) {
logger.info(line)
if (line.matches('.*Undertow HTTP listener default listening on .*:\\d+$')) {
assert httpPort == 0
final int index = line.lastIndexOf(":")
assert index >= 0
httpPort = Integer.parseInt(line.substring(index + 1))
// set this system property so the test runner knows the port Wildfly is listening for HTTP requests on
integTestRunner.systemProperty("tests.jboss.root", "http://localhost:$httpPort/wildfly-$version/transport")
} else if (line.matches('.*Http management interface listening on http://.*:\\d+/management$')) {
assert managementPort == 0
final int colonIndex = line.lastIndexOf(":")
assert colonIndex >= 0
final int slashIndex = line.lastIndexOf("/")
assert slashIndex >= 0
managementPort = Integer.parseInt(line.substring(colonIndex + 1, slashIndex))
/*
* As soon as we know the management port, we fork a process that will ensure the Wildfly process is killed if we
* teardown abnormally. We skip these tests on Windows so we do not need to worry about CLI compatibility here.
*/
final File script = new File(project.buildDir, "wildfly/wildfly.killer.sh")
script.setText(
["function shutdown {",
" ${wildflyInstall}/bin/jboss-cli.sh --controller=localhost:${-> managementPort} --connect command=shutdown",
"}",
"trap shutdown EXIT",
// will wait indefinitely for input, but we never pass input, and the pipe is only closed when the build dies
"read line\n"].join('\n'), 'UTF-8')
final ProcessBuilder killer = new ProcessBuilder("bash", script.absolutePath)
killer.start()
/*
* As soon as we know the management port, we fork a process that will ensure the Wildfly process is killed if we
* teardown abnormally. We skip these tests on Windows so we do not need to worry about CLI compatibility here.
*/
final File script = new File(project.buildDir, "wildfly/wildfly.killer.sh")
script.setText(
["function shutdown {",
" ${wildflyInstall}/bin/jboss-cli.sh --controller=localhost:${-> managementPort} --connect command=shutdown",
"}",
"trap shutdown EXIT",
// will wait indefinitely for input, but we never pass input, and the pipe is only closed when the build dies
"read line\n"].join('\n'), 'UTF-8')
final ProcessBuilder killer = new ProcessBuilder("bash", script.absolutePath)
killer.start()
} else if (line.matches(".*WildFly Full \\d+\\.\\d+\\.\\d+\\.Final \\(WildFly Core \\d+\\.\\d+\\.\\d+\\.Final\\) started.*")) {
break
}
}
assert httpPort > 0
assert managementPort > 0
} else if (line.matches(".*WildFly Full \\d+\\.\\d+\\.\\d+\\.Final \\(WildFly Core \\d+\\.\\d+\\.\\d+\\.Final\\) started.*")) {
break
}
}
assert httpPort > 0
assert managementPort > 0
}
}
}
task configureTransportClient(type: LoggedExec) {
  dependsOn startWildfly
  // we skip these tests on Windows so we do not need to worry about compatibility here
  commandLine "${wildflyInstall}/bin/jboss-cli.sh",
    "--controller=localhost:${-> managementPort}",
    "--connect",
    "--command=/system-property=elasticsearch.properties:add(value=\${jboss.server.config.dir}/elasticsearch.properties)"
}
task stopWildfly(type: LoggedExec) {
  // we skip these tests on Windows so we do not need to worry about CLI compatibility here
  commandLine "${wildflyInstall}/bin/jboss-cli.sh", "--controller=localhost:${-> managementPort}", "--connect", "command=shutdown"
}
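The "${-> managementPort}" seen in the CLI tasks above is Groovy's lazy GString idiom: managementPort is only discovered while scanning Wildfly's startup log, so the command line must render the value when the task executes rather than when it is configured. A minimal standalone sketch of the difference (hypothetical values, not part of this build):

int managementPort = 0
def eager = "port=${managementPort}"    // the value 0 is captured immediately
def lazy = "port=${-> managementPort}"  // the closure is invoked at toString() time
managementPort = 9990                   // e.g. parsed later from the server log
assert eager.toString() == 'port=0'
assert lazy.toString() == 'port=9990'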
if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
  integTestRunner.dependsOn(configureTransportClient)
  final TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
    @Override
    void afterExecute(final Task task, final TaskState state) {
      if (state.failure != null) {
        final File logFile = new File(wildflyInstall, "standalone/log/server.log")
        println("\nWildfly server log (from ${logFile}):")
        println('-----------------------------------------')
        final Stream<String> stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)
        try {
          for (String line : stream) {
            println(line)
          }
        } finally {
          stream.close()
        }
        println('=========================================')
      }
    }
  }
  integTestRunner.doFirst {
    project.gradle.addListener(logDumpListener)
  }
  integTestRunner.doLast {
    project.gradle.removeListener(logDumpListener)
  }
  integTestRunner.finalizedBy(stopWildfly)
} else {
  integTest.enabled = false
  testingConventions.enabled = false
}
check.dependsOn(integTest)
@@ -228,11 +228,11 @@ thirdPartyAudit.enabled = false
testingConventions {
  naming.clear()
  // We only have one "special" integration test here to connect to wildfly
  naming {
    IT {
      baseClass 'org.apache.lucene.util.LuceneTestCase'
    }
  }
}
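For illustration, a hypothetical test class that would satisfy this convention: its name ends in IT and it extends the declared base class.

class WildflyTransportIT extends org.apache.lucene.util.LuceneTestCase {
  // integration tests that talk to the deployed Wildfly instance would live here
}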

View File: server/build.gradle

@@ -43,11 +43,11 @@ if (!isEclipse && !isIdea) {
}
}
}
  configurations {
    java9Compile.extendsFrom(compile)
  }

  dependencies {
    java9Compile sourceSets.main.output
  }
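These two blocks wire up the multi-release JAR overlay. A sketch of the java9 source set they assume, defined elsewhere in this file (the directory name here is an assumption):

sourceSets {
  java9 {
    java {
      srcDirs = ['src/main/java9']  // assumed overlay location for JDK 9+ classes
    }
  }
}

With java9Compile.extendsFrom(compile) the overlay compiles against every dependency of the main source set, and the java9Compile dependency on sourceSets.main.output additionally puts main's compiled classes on that classpath.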
@@ -208,7 +208,7 @@ processResources {
  dependsOn generateModulesList, generatePluginsList
}
thirdPartyAudit.ignoreMissingClasses(
  // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
  'com.fasterxml.jackson.databind.ObjectMapper',
@@ -328,9 +328,9 @@ dependencyLicenses {
  mapping from: /lucene-.*/, to: 'lucene'
  dependencies = project.configurations.runtime.fileCollection {
    it.group.startsWith('org.elasticsearch') == false ||
      // keep the following org.elasticsearch jars in
      (it.name == 'jna' ||
        it.name == 'securesm')
  }
}
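Restated as a plain predicate with hypothetical coordinates (runnable in a Groovy shell): third-party jars are kept for license checks, in-repo org.elasticsearch jars are skipped, and jna and securesm are kept despite their group.

def keep = { String group, String name ->
  group.startsWith('org.elasticsearch') == false ||
    (name == 'jna' || name == 'securesm')
}
assert keep('org.apache.logging.log4j', 'log4j-core')           // third-party: checked
assert keep('org.elasticsearch', 'jna')                         // explicitly kept
assert keep('org.elasticsearch', 'elasticsearch-core') == false // in-repo: skipped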
@@ -339,13 +339,13 @@ task integTest(type: Test) {
  description = 'Multi-node tests'
  mustRunAfter test
  include '**/*IT.class'
}
check.dependsOn integTest
task internalClusterTest {
  dependsOn integTest
}

View File: settings.gradle

@@ -62,11 +62,11 @@ List projects = [
]
/**
 * Iterates over sub directories, looking for build.gradle, and adds a project if found
 * for that dir with the given path prefix. Note that this requires each level
 * of the dir hierarchy to have a build.gradle. Otherwise we would have to iterate
 * all files/directories in the source tree to find all projects.
 */
void addSubProjects(String path, File dir) {
  if (dir.isDirectory() == false) return;
  if (dir.name == 'buildSrc') return;
@@ -76,12 +76,12 @@ void addSubProjects(String path, File dir) {
  final String projectName = "${path}:${dir.name}"
  include projectName
  if (path.isEmpty() || path.startsWith(':example-plugins')) {
    project(projectName).projectDir = dir
  }
  for (File subdir : dir.listFiles()) {
    addSubProjects(projectName, subdir)
  }
}
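A hypothetical layout illustrating the recursion, assuming every level carries its own build.gradle:

// plugins/
//   examples/
//     custom-settings/
//
// addSubProjects('', new File(rootProject.projectDir, 'plugins')) would then
// include ':plugins', ':plugins:examples', and ':plugins:examples:custom-settings'.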
// include example plugins first, so adding plugin dirs below won't muck with :example-plugins
@@ -107,7 +107,7 @@ project(':build-tools').projectDir = new File(rootProject.projectDir, 'buildSrc'
project(':build-tools:reaper').projectDir = new File(rootProject.projectDir, 'buildSrc/reaper')
project(":libs").children.each { libsProject ->
libsProject.name = "elasticsearch-${libsProject.name}"
libsProject.name = "elasticsearch-${libsProject.name}"
}
// look for extra plugins for elasticsearch

View File: test/fixtures/azure-fixture/build.gradle

@@ -23,17 +23,17 @@ description = 'Fixture for Azure external service'
test.enabled = false
dependencies {
  compile project(':server')
}
preProcessFixture {
  dependsOn jar
  doLast {
    file("${testFixturesDir}/shared").mkdirs()
    project.copy {
      from jar
      from configurations.runtimeClasspath
      into "${testFixturesDir}/shared"
    }
  }
}

Some files were not shown because too many files have changed in this diff.