Build: Split distributions into oss and default

This commit makes x-pack a module and adds it to the default
distribution. It also creates zip, tar, deb and rpm distributions
that contain only oss code.
Ryan Ernst 2018-02-23 08:03:17 -08:00
parent cb56bf49d5
commit fab5e21e7d
117 changed files with 830 additions and 570 deletions
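
For illustration, this is roughly how a consuming Gradle build would pick between the two flavors after the split; the coordinates mirror the project substitutions added below, while the 'packaging' configuration name and the 6.3.0 version are only placeholders:

configurations { packaging }
dependencies {
    // default distribution: bundles x-pack as a module
    packaging 'org.elasticsearch.distribution.tar:elasticsearch:6.3.0@tar.gz'
    // oss-only distribution: same group, artifact name gains an '-oss' suffix
    packaging 'org.elasticsearch.distribution.tar:elasticsearch-oss:6.3.0@tar.gz'
}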

View File

@ -206,9 +206,13 @@ subprojects {
"org.elasticsearch.test:framework:${version}": ':test:framework',
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:archives:integ-test-zip',
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:archives:zip',
"org.elasticsearch.distribution.zip:elasticsearch-oss:${version}": ':distribution:archives:oss-zip',
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:archives:tar',
"org.elasticsearch.distribution.tar:elasticsearch-oss:${version}": ':distribution:archives:oss-tar',
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:packages:rpm',
"org.elasticsearch.distribution.rpm:elasticsearch-oss:${version}": ':distribution:packages:oss-rpm',
"org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:packages:deb',
"org.elasticsearch.distribution.deb:elasticsearch-oss:${version}": ':distribution:packages:oss-deb',
"org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage',
// for transport client
"org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4',
@ -228,6 +232,11 @@ subprojects {
ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${snapshot}"] = snapshotProject
ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${snapshot}"] = snapshotProject
ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${snapshot}"] = snapshotProject
if (snapshot.onOrAfter('6.3.0')) {
ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch-oss:${snapshot}"] = snapshotProject
ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch-oss:${snapshot}"] = snapshotProject
ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch-oss:${snapshot}"] = snapshotProject
}
}
}

View File

@ -37,10 +37,11 @@ class MetaPluginBuildPlugin implements Plugin<Project> {
project.plugins.apply(RestTestPlugin)
createBundleTask(project)
boolean isModule = project.path.startsWith(':modules:')
boolean isModule = project.path.startsWith(':modules:') || project.path.startsWith(':x-pack:plugin')
project.integTestCluster {
dependsOn(project.bundlePlugin)
distribution = 'integ-test-zip'
}
BuildPlugin.configurePomGeneration(project)
project.afterEvaluate {
@ -49,9 +50,9 @@ class MetaPluginBuildPlugin implements Plugin<Project> {
if (project.integTestCluster.distribution == 'integ-test-zip') {
project.integTestCluster.module(project)
}
} else {
} else {
project.integTestCluster.plugin(project.path)
}
}
}
RunTask run = project.tasks.create('run', RunTask)

View File

@ -50,7 +50,7 @@ public class PluginBuildPlugin extends BuildPlugin {
// this afterEvaluate must happen before the afterEvaluate added by integTest creation,
// so that the file name resolution for installing the plugin will be setup
project.afterEvaluate {
boolean isModule = project.path.startsWith(':modules:')
boolean isModule = project.path.startsWith(':modules:') || project.path.startsWith(':x-pack:plugin')
String name = project.pluginProperties.extension.name
project.archivesBaseName = name
@ -70,6 +70,7 @@ public class PluginBuildPlugin extends BuildPlugin {
if (isModule) {
project.integTestCluster.module(project)
project.tasks.run.clusterConfig.module(project)
project.tasks.run.clusterConfig.distribution = 'integ-test-zip'
} else {
project.integTestCluster.plugin(project.path)
project.tasks.run.clusterConfig.plugin(project.path)

View File

@ -131,13 +131,22 @@ class ClusterFormationTasks {
/** Adds a dependency on the given distribution */
static void configureDistributionDependency(Project project, String distro, Configuration configuration, Version elasticsearchVersion) {
if (elasticsearchVersion.before('6.3.0') && distro.startsWith('oss-')) {
distro = distro.substring('oss-'.length())
}
String packaging = distro
if (distro == 'tar') {
packaging = 'tar.gz'
} else if (distro == 'integ-test-zip') {
if (distro.contains('tar')) {
packaging = 'tar.gz'
} else if (distro.contains('zip')) {
packaging = 'zip'
}
project.dependencies.add(configuration.name, "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}")
String subgroup = distro
String artifactName = 'elasticsearch'
if (distro.contains('oss')) {
artifactName += '-oss'
subgroup = distro.substring('oss-'.length())
}
project.dependencies.add(configuration.name, "org.elasticsearch.distribution.${subgroup}:${artifactName}:${elasticsearchVersion}@${packaging}")
}
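
Worked through for a single value, the mapping above turns an 'oss-tar' distribution into the tar subgroup with an '-oss' artifact; a standalone sketch of the same logic (version omitted for brevity):

String distro = 'oss-tar'
String packaging = distro
if (distro.contains('tar')) {
    packaging = 'tar.gz'
} else if (distro.contains('zip')) {
    packaging = 'zip'
}
String artifactName = 'elasticsearch'
String subgroup = distro
if (distro.contains('oss')) {
    artifactName += '-oss'
    subgroup = distro.substring('oss-'.length())
}
// notation the dependency ends up using
assert "org.elasticsearch.distribution.${subgroup}:${artifactName}@${packaging}" ==
        'org.elasticsearch.distribution.tar:elasticsearch-oss@tar.gz'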
/** Adds a dependency on a different version of the given plugin, which will be retrieved using gradle's dependency resolution */
@ -260,6 +269,7 @@ class ClusterFormationTasks {
switch (node.config.distribution) {
case 'integ-test-zip':
case 'zip':
case 'oss-zip':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from {
project.zipTree(configuration.singleFile)
@ -268,6 +278,7 @@ class ClusterFormationTasks {
}
break;
case 'tar':
case 'oss-tar':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from {
project.tarTree(project.resources.gzip(configuration.singleFile))

View File

@ -312,6 +312,8 @@ class NodeInfo {
case 'integ-test-zip':
case 'zip':
case 'tar':
case 'oss-zip':
case 'oss-tar':
path = "elasticsearch-${nodeVersion}"
break
case 'rpm':
@ -328,7 +330,9 @@ class NodeInfo {
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'oss-zip':
case 'tar':
case 'oss-tar':
return new File(homeDir(baseDir, distro, nodeVersion), 'config')
case 'rpm':
case 'deb':

View File

@ -24,6 +24,7 @@ import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.DefaultTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.Transformer
import org.gradle.api.execution.TaskExecutionAdapter
import org.gradle.api.internal.tasks.options.Option
import org.gradle.api.provider.Property

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.gradle.vagrant
import org.elasticsearch.gradle.Version
import org.gradle.api.tasks.Input
class VagrantPropertiesExtension {
@ -26,7 +27,7 @@ class VagrantPropertiesExtension {
List<String> boxes
@Input
String upgradeFromVersion
Version upgradeFromVersion
@Input
List<String> upgradeFromVersions

View File

@ -3,6 +3,7 @@ package org.elasticsearch.gradle.vagrant
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.FileContentsTask
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.Version
import org.gradle.api.*
import org.gradle.api.artifacts.dsl.RepositoryHandler
import org.gradle.api.execution.TaskExecutionAdapter
@ -37,7 +38,7 @@ class VagrantTestPlugin implements Plugin<Project> {
]
/** All onboarded archives by default, available for Bats tests even if not used **/
static List<String> DISTRIBUTION_ARCHIVES = ['tar', 'rpm', 'deb']
static List<String> DISTRIBUTION_ARCHIVES = ['tar', 'rpm', 'deb', 'oss-rpm', 'oss-deb']
/** Packages onboarded for upgrade tests **/
static List<String> UPGRADE_FROM_ARCHIVES = ['rpm', 'deb']
@ -105,12 +106,15 @@ class VagrantTestPlugin implements Plugin<Project> {
private static void createPackagingConfiguration(Project project) {
project.configurations.create(PACKAGING_CONFIGURATION)
String upgradeFromVersion = System.getProperty("tests.packaging.upgradeVersion")
if (upgradeFromVersion == null) {
String upgradeFromVersionRaw = System.getProperty("tests.packaging.upgradeVersion");
Version upgradeFromVersion
if (upgradeFromVersionRaw == null) {
String firstPartOfSeed = project.rootProject.testSeed.tokenize(':').get(0)
final long seed = Long.parseUnsignedLong(firstPartOfSeed, 16)
final def indexCompatVersions = project.bwcVersions.indexCompatible
upgradeFromVersion = indexCompatVersions[new Random(seed).nextInt(indexCompatVersions.size())]
} else {
upgradeFromVersion = Version.fromString(upgradeFromVersionRaw)
}
DISTRIBUTION_ARCHIVES.each {
@ -128,6 +132,10 @@ class VagrantTestPlugin implements Plugin<Project> {
// The version of elasticsearch that we upgrade *from*
project.dependencies.add(PACKAGING_CONFIGURATION,
"org.elasticsearch.distribution.${it}:elasticsearch:${upgradeFromVersion}@${it}")
if (upgradeFromVersion.onOrAfter('6.3.0')) {
project.dependencies.add(PACKAGING_CONFIGURATION,
"org.elasticsearch.distribution.${it}:elasticsearch-oss:${upgradeFromVersion}@${it}")
}
}
project.extensions.esvagrant.upgradeFromVersion = upgradeFromVersion
@ -173,7 +181,17 @@ class VagrantTestPlugin implements Plugin<Project> {
Task createUpgradeFromFile = project.tasks.create('createUpgradeFromFile', FileContentsTask) {
dependsOn copyPackagingArchives
file "${archivesDir}/upgrade_from_version"
contents project.extensions.esvagrant.upgradeFromVersion
contents project.extensions.esvagrant.upgradeFromVersion.toString()
}
Task createUpgradeIsOssFile = project.tasks.create('createUpgradeIsOssFile', FileContentsTask) {
dependsOn copyPackagingArchives
doFirst {
project.delete("${archivesDir}/upgrade_is_oss")
}
onlyIf { project.extensions.esvagrant.upgradeFromVersion.onOrAfter('6.3.0') }
file "${archivesDir}/upgrade_is_oss"
contents ''
}
File batsDir = new File(packagingDir, BATS)
@ -214,7 +232,7 @@ class VagrantTestPlugin implements Plugin<Project> {
Task vagrantSetUpTask = project.tasks.create('setupPackagingTest')
vagrantSetUpTask.dependsOn 'vagrantCheckVersion'
vagrantSetUpTask.dependsOn copyPackagingArchives, createVersionFile, createUpgradeFromFile
vagrantSetUpTask.dependsOn copyPackagingArchives, createVersionFile, createUpgradeFromFile, createUpgradeIsOssFile
vagrantSetUpTask.dependsOn copyBatsTests, copyBatsUtils
}

View File

@ -42,18 +42,18 @@ task createPluginsDir(type: EmptyDirTask) {
dirMode 0755
}
CopySpec archiveFiles(CopySpec... innerFiles) {
CopySpec archiveFiles(CopySpec modulesFiles) {
return copySpec {
into("elasticsearch-${version}") {
with libFiles
into('config') {
dirMode 0750
fileMode 0660
with configFiles('def')
with configFiles('def', false)
}
into('bin') {
with binFiles('def', oss)
with copySpec {
with binFiles('def')
from('../src/bin') {
include '*.bat'
filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
@ -78,36 +78,48 @@ CopySpec archiveFiles(CopySpec... innerFiles) {
from('../src') {
include 'bin/*.exe'
}
for (CopySpec files : innerFiles) {
with files
into('modules') {
with modulesFiles
}
}
}
}
task buildIntegTestZip(type: Zip) {
// common config across all zip/tar
tasks.withType(AbstractArchiveTask) {
dependsOn createLogsDir, createPluginsDir
destinationDir = file('integ-test-zip/build/distributions')
baseName = 'elasticsearch'
String subdir = it.name.substring('build'.size()).replaceAll(/[A-Z]/) { '-' + it.toLowerCase() }.substring(1)
destinationDir = file("${subdir}/build/distributions")
baseName = "elasticsearch${ subdir.contains('oss') ? '-oss' : ''}"
}
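
As a quick check of the name derivation above, the buildOssTar task resolves to the oss-tar output directory and the '-oss' base name; a standalone sketch:

String taskName = 'buildOssTar'
String subdir = taskName.substring('build'.size()).replaceAll(/[A-Z]/) { '-' + it.toLowerCase() }.substring(1)
assert subdir == 'oss-tar'
assert "elasticsearch${ subdir.contains('oss') ? '-oss' : ''}" == 'elasticsearch-oss'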
task buildIntegTestZip(type: Zip) {
with archiveFiles(transportModulesFiles)
}
task buildZip(type: Zip) {
dependsOn createLogsDir, createPluginsDir
destinationDir = file('zip/build/distributions')
baseName = 'elasticsearch'
with archiveFiles(modulesFiles)
with archiveFiles(modulesFiles(false))
}
task buildTar(type: Tar) {
dependsOn createLogsDir, createPluginsDir
destinationDir = file('tar/build/distributions')
baseName = 'elasticsearch'
task buildOssZip(type: Zip) {
with archiveFiles(modulesFiles(true))
}
Closure commonTarConfig = {
extension = 'tar.gz'
compression = Compression.GZIP
dirMode 0755
fileMode 0644
with archiveFiles(modulesFiles)
}
task buildTar(type: Tar) {
configure(commonTarConfig)
with archiveFiles(modulesFiles(false))
}
task buildOssTar(type: Tar) {
configure(commonTarConfig)
with archiveFiles(modulesFiles(true))
}
// This configures the default artifact for the distribution specific
@ -119,8 +131,6 @@ task buildTar(type: Tar) {
subprojects {
apply plugin: 'distribution'
archivesBaseName = 'elasticsearch'
String buildTask = "build${it.name.replaceAll(/-[a-z]/) { it.substring(1).toUpperCase() }.capitalize()}"
ext.buildDist = parent.tasks.getByName(buildTask)
artifacts {
@ -158,7 +168,7 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) {
apply plugin: 'elasticsearch.rest-test'
integTest {
includePackaged true
includePackaged = true
}
integTestCluster {
@ -190,12 +200,16 @@ configure(subprojects.findAll { it.name.contains('zip') }) {
// note: the group must be correct before applying the nexus plugin, or
// it will capture the wrong value...
project.group = "org.elasticsearch.distribution.${project.name}"
String subgroup = project.name == 'integ-test-zip' ? 'integ-test-zip' : 'zip'
project.group = "org.elasticsearch.distribution.${subgroup}"
// make the pom file name use elasticsearch instead of the project name
archivesBaseName = "elasticsearch${it.name.contains('oss') ? '-oss' : ''}"
publishing {
publications {
nebula {
artifactId 'elasticsearch'
artifactId archivesBaseName
artifact buildDist
}
/*
@ -215,7 +229,7 @@ configure(subprojects.findAll { it.name.contains('zip') }) {
* local work, since we publish to maven central externally.
*/
nebulaRealPom(MavenPublication) {
artifactId 'elasticsearch'
artifactId archivesBaseName
pom.packaging = 'pom'
pom.withXml { XmlProvider xml ->
Node root = xml.asNode()

View File

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

View File

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

View File

@ -17,7 +17,6 @@
* under the License.
*/
import org.apache.tools.ant.filters.FixCrLfFilter
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.BuildPlugin
@ -27,6 +26,7 @@ import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.precommit.DependencyLicensesTask
import org.elasticsearch.gradle.precommit.UpdateShasTask
import org.elasticsearch.gradle.test.RunTask
import org.gradle.api.file.RelativePath
Collection distributions = project('archives').subprojects + project('packages').subprojects
@ -46,42 +46,156 @@ task generateDependenciesReport(type: ConcatFilesTask) {
*****************************************************************************/
// integ test zip only uses server, so a different notice file is needed there
task buildCoreNotice(type: NoticeTask) {
task buildServerNotice(type: NoticeTask) {
licensesDir new File(project(':server').projectDir, 'licenses')
}
// other distributions include notices from modules as well, which are added below later
task buildFullNotice(type: NoticeTask) {
task buildDefaultNotice(type: NoticeTask) {
licensesDir new File(project(':server').projectDir, 'licenses')
}
// other distributions include notices from modules as well, which are added below later
task buildOssNotice(type: NoticeTask) {
licensesDir new File(project(':server').projectDir, 'licenses')
}
/*****************************************************************************
* Modules *
*****************************************************************************/
String ossOutputs = 'build/outputs/oss'
String defaultOutputs = 'build/outputs/default'
String transportOutputs = 'build/outputs/transport-only'
task buildModules(type: Sync) {
into 'build/modules'
task processOssOutputs(type: Sync) {
into ossOutputs
}
task processDefaultOutputs(type: Sync) {
into defaultOutputs
from processOssOutputs
}
// Integ tests work over the rest http layer, so we need a transport included with the integ test zip.
// All transport modules are included so that they may be randomized for testing
task processTransportOutputs(type: Sync) {
into transportOutputs
}
// these are dummy tasks that can be used to depend on the relevant sub output dir
task buildOssModules {
dependsOn processOssOutputs
outputs.dir "${ossOutputs}/modules"
}
task buildOssBin {
dependsOn processOssOutputs
outputs.dir "${ossOutputs}/bin"
}
task buildOssConfig {
dependsOn processOssOutputs
outputs.dir "${ossOutputs}/config"
}
task buildDefaultModules {
dependsOn processDefaultOutputs
outputs.dir "${defaultOutputs}/modules"
}
task buildDefaultBin {
dependsOn processDefaultOutputs
outputs.dir "${defaultOutputs}/bin"
}
task buildDefaultConfig {
dependsOn processDefaultOutputs
outputs.dir "${defaultOutputs}/config"
}
task buildTransportModules {
dependsOn processTransportOutputs
outputs.dir "${transportOutputs}/modules"
}
void copyModule(Sync copyTask, Project module) {
copyTask.configure {
dependsOn { module.bundlePlugin }
from({ zipTree(module.bundlePlugin.outputs.files.singleFile) }) {
includeEmptyDirs false
// these are handled separately in the log4j config tasks below
exclude '*/config/log4j2.properties'
exclude 'config/log4j2.properties'
eachFile { details ->
String name = module.plugins.hasPlugin('elasticsearch.esplugin') ? module.esplugin.name : module.es_meta_plugin.name
// Copy all non config/bin files
// Note these might be under a subdirectory in the case of a meta plugin
if ((details.relativePath.pathString ==~ /([^\/]+\/)?(config|bin)\/.*/) == false) {
details.relativePath = details.relativePath.prepend('modules', name)
} else if ((details.relativePath.pathString ==~ /([^\/]+\/)(config|bin)\/.*/)) {
// this is the meta plugin case, in which we need to remove the intermediate dir
String[] segments = details.relativePath.segments
details.relativePath = new RelativePath(true, segments.takeRight(segments.length - 1))
}
}
}
}
}
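
The two patterns above separate ordinary module content (relocated under modules/<name>) from config/bin files that may sit one level down inside a meta plugin; for example, with illustrative paths:

// ordinary module file: no config/bin segment, so it is moved under modules/<name>
assert ('plugin-descriptor.properties' ==~ /([^\/]+\/)?(config|bin)\/.*/) == false
// meta plugin bin file: matches, and the leading meta directory is stripped
assert 'x-pack/bin/elasticsearch-certgen' ==~ /([^\/]+\/)(config|bin)\/.*/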
// log4j config could be contained in modules, so we must join it together using these tasks
task buildOssLog4jConfig {
dependsOn processOssOutputs
ext.contents = []
ext.log4jFile = file("${ossOutputs}/log4j2.properties")
outputs.file log4jFile
}
task buildDefaultLog4jConfig {
dependsOn processDefaultOutputs
ext.contents = []
ext.log4jFile = file("${defaultOutputs}/log4j2.properties")
outputs.file log4jFile
}
Closure writeLog4jProperties = {
String mainLog4jProperties = file('src/config/log4j2.properties').getText('UTF-8')
it.log4jFile.setText(mainLog4jProperties, 'UTF-8')
for (String moduleLog4jProperties : it.contents.reverse()) {
it.log4jFile.append(moduleLog4jProperties, 'UTF-8')
}
}
buildOssLog4jConfig.doLast(writeLog4jProperties)
buildDefaultLog4jConfig.doLast(writeLog4jProperties)
// copy log4j2.properties from modules that have it
void copyLog4jProperties(Task buildTask, Project module) {
buildTask.doFirst {
FileTree tree = zipTree(module.bundlePlugin.outputs.files.singleFile)
FileTree filtered = tree.matching {
include 'config/log4j2.properties'
include '*/config/log4j2.properties' // could be in a bundled plugin
}
if (filtered.isEmpty() == false) {
buildTask.contents.add('\n\n' + filtered.singleFile.getText('UTF-8'))
}
}
}
ext.restTestExpansions = [
'expected.modules.count': 0,
]
// we create the buildModules task above so the distribution subprojects can
// depend on it, but we don't actually configure it until here so we can do a single
// we create the buildOssModules task above but fill it here so we can do a single
// loop over modules to also setup cross task dependencies and increment our modules counter
project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each { Project module ->
buildFullNotice {
def defaultLicensesDir = new File(module.projectDir, 'licenses')
if (defaultLicensesDir.exists()) {
licensesDir defaultLicensesDir
}
File licenses = new File(module.projectDir, 'licenses')
if (licenses.exists()) {
buildDefaultNotice.licensesDir licenses
buildOssNotice.licensesDir licenses
}
buildModules {
dependsOn({ project(module.path).bundlePlugin })
into(module.name) {
from { zipTree(project(module.path).bundlePlugin.outputs.files.singleFile) }
}
copyModule(processOssOutputs, module)
if (module.name.startsWith('transport-')) {
copyModule(processTransportOutputs, module)
}
copyLog4jProperties(buildOssLog4jConfig, module)
copyLog4jProperties(buildDefaultLog4jConfig, module)
// make sure the module's integration tests run after the integ-test-zip (ie rest tests)
module.afterEvaluate({
module.integTest.mustRunAfter(':distribution:archives:integ-test-zip:integTest')
@ -89,20 +203,19 @@ project.rootProject.subprojects.findAll { it.parent.path == ':modules' }.each {
restTestExpansions['expected.modules.count'] += 1
}
// Integ tests work over the rest http layer, so we need a transport included with the integ test zip.
// All transport modules are included so that they may be randomized for testing
task buildTransportModules(type: Sync) {
into 'build/transport-modules'
}
project.rootProject.subprojects.findAll { it.path.startsWith(':modules:transport-') }.each { Project transport ->
buildTransportModules {
dependsOn({ project(transport.path).bundlePlugin })
into(transport.name) {
from { zipTree(project(transport.path).bundlePlugin.outputs.files.singleFile) }
}
// use licenses from each of the bundled xpack plugins
Project xpack = project(':x-pack:plugin')
xpack.subprojects.findAll { it.name != 'bwc' }.each { Project xpackSubproject ->
File licenses = new File(xpackSubproject.projectDir, 'licenses')
if (licenses.exists()) {
buildDefaultNotice.licensesDir licenses
}
}
// but copy just the top level meta plugin to the default modules
copyModule(processDefaultOutputs, xpack)
copyLog4jProperties(buildDefaultLog4jConfig, xpack)
//
// make sure we have a clean task since we aren't a java project, but we have tasks that
// put stuff in the build dir
@ -130,31 +243,64 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
from { project(':distribution:tools:plugin-cli').jar }
}
modulesFiles = copySpec {
into 'modules'
from project(':distribution').buildModules
}
transportModulesFiles = copySpec {
into "modules"
from project(':distribution').buildTransportModules
}
configFiles = { distributionType ->
modulesFiles = { oss ->
copySpec {
from '../src/config'
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType))
eachFile {
if (it.relativePath.segments[-2] == 'bin') {
// bin files, wherever they are within modules (eg platform specific) should be executable
it.mode = 0755
}
}
if (oss) {
from project(':distribution').buildOssModules
} else {
from project(':distribution').buildDefaultModules
}
}
}
binFiles = { distributionType ->
transportModulesFiles = copySpec {
from project(':distribution').buildTransportModules
}
configFiles = { distributionType, oss ->
copySpec {
// everything except windows files
from '../src/bin'
exclude '*.bat'
exclude '*.exe'
eachFile { it.setMode(0755) }
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType))
with copySpec {
// main config files, processed with distribution specific substitutions
from '../src/config'
exclude 'log4j2.properties' // this is handled separately below
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType))
}
if (oss) {
from project(':distribution').buildOssLog4jConfig
from project(':distribution').buildOssConfig
} else {
from project(':distribution').buildDefaultLog4jConfig
from project(':distribution').buildDefaultConfig
}
}
}
binFiles = { distributionType, oss ->
copySpec {
with copySpec {
// main bin files, processed with distribution specific substitutions
// everything except windows files
from '../src/bin'
exclude '*.bat'
exclude '*.exe'
exclude '*.bat'
eachFile { it.setMode(0755) }
MavenFilteringHack.filter(it, expansionsForDistribution(distributionType))
}
with copySpec {
eachFile { it.setMode(0755) }
if (oss) {
from project(':distribution').buildOssBin
} else {
from project(':distribution').buildDefaultBin
}
}
}
}
@ -166,9 +312,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
noticeFile = copySpec {
if (project.name == 'integ-test-zip') {
from buildCoreNotice
from buildServerNotice
} else {
from buildFullNotice
from buildDefaultNotice
}
}
}
@ -176,7 +322,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
task run(type: RunTask) {
distribution = 'zip'
distribution = System.getProperty('run.distribution', 'zip')
}
/**

View File

@ -131,17 +131,20 @@ subprojects {
}
}
String debDir = 'distribution/packages/deb'
String rpmDir = 'distribution/packages/rpm'
String zipDir = 'distribution/archives/zip'
if (bwcVersion.before('6.3.0')) {
debDir = 'distribution/deb'
rpmDir = 'distribution/rpm'
zipDir = 'distribution/zip'
List<File> artifactFiles = []
List<String> projectDirs = []
for (String project : ['zip', 'deb', 'rpm']) {
String baseDir = "distribution"
if (bwcVersion.onOrAfter('6.3.0')) {
baseDir += project == 'zip' ? '/archives' : '/packages'
// add oss variant first
projectDirs.add("${baseDir}/oss-${project}")
artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}.${project}"))
}
projectDirs.add("${baseDir}/${project}")
artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}.${project}"))
}
File bwcDeb = file("${checkoutDir}/${debDir}/build/distributions/elasticsearch-${bwcVersion}.deb")
File bwcRpm = file("${checkoutDir}/${rpmDir}/build/distributions/elasticsearch-${bwcVersion}.rpm")
File bwcZip = file("${checkoutDir}/${zipDir}/build/distributions/elasticsearch-${bwcVersion}.zip")
task buildBwcVersion(type: Exec) {
dependsOn checkoutBwcBranch, writeBuildMetadata
workingDir = checkoutDir
@ -159,7 +162,10 @@ subprojects {
} else {
executable new File(checkoutDir, 'gradlew').toString()
}
args ":${debDir.replace('/', ':')}:assemble", ":${rpmDir.replace('/', ':')}:assemble", ":${zipDir.replace('/', ':')}:assemble", "-Dbuild.snapshot=true"
for (String dir : projectDirs) {
args ":${dir.replace('/', ':')}:assemble"
}
args "-Dbuild.snapshot=true"
final LogLevel logLevel = gradle.startParameter.logLevel
if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG].contains(logLevel)) {
args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}"
@ -172,7 +178,7 @@ subprojects {
args "--full-stacktrace"
}
doLast {
List missing = [bwcDeb, bwcRpm, bwcZip].grep { file ->
List missing = artifactFiles.grep { file ->
false == file.exists()
}
if (false == missing.empty) {
@ -183,8 +189,10 @@ subprojects {
}
artifacts {
'default' file: bwcDeb, name: 'elasticsearch', type: 'deb', builtBy: buildBwcVersion
'default' file: bwcRpm, name: 'elasticsearch', type: 'rpm', builtBy: buildBwcVersion
'default' file: bwcZip, name: 'elasticsearch', type: 'zip', builtBy: buildBwcVersion
for (File artifactFile : artifactFiles) {
String artifactName = artifactFile.name.contains('oss') ? 'elasticsearch-oss' : 'elasticsearch'
String suffix = artifactFile.toString()[-3..-1]
'default' file: artifactFile, name: artifactName, type: suffix, builtBy: buildBwcVersion
}
}
}

View File

@ -54,19 +54,21 @@ buildscript {
}
}
void addProcessFilesTask(String type) {
String packagingFiles = "build/packaging/${type}"
void addProcessFilesTask(String type, boolean oss) {
String packagingFiles = "build/packaging/${ oss ? 'oss-' : ''}${type}"
task("process${type.capitalize()}Files", type: Copy) {
from 'src/common'
from "src/${type}"
task("process${oss ? 'Oss' : ''}${type.capitalize()}Files", type: Copy) {
into packagingFiles
into('config') {
from '../src/config'
with copySpec {
from 'src/common'
from "src/${type}"
MavenFilteringHack.filter(it, expansionsForDistribution(type, oss))
}
MavenFilteringHack.filter(it, expansionsForDistribution(type))
into('config') {
with configFiles(type, oss)
}
doLast {
// create empty dirs, we set the permissions when configuring the packages
@ -77,19 +79,24 @@ void addProcessFilesTask(String type) {
}
}
}
addProcessFilesTask('deb')
addProcessFilesTask('rpm')
addProcessFilesTask('deb', true)
addProcessFilesTask('deb', false)
addProcessFilesTask('rpm', true)
addProcessFilesTask('rpm', false)
// Common configuration that is package dependent. This can't go in ospackage
// since we have different templated files that need to be consumed, but the structure
// is the same
Closure commonPackageConfig(String type) {
Closure commonPackageConfig(String type, boolean oss) {
return {
dependsOn "process${oss ? 'Oss' : ''}${type.capitalize()}Files"
packageName "elasticsearch${oss ? '-oss' : ''}"
// Follow elasticsearch's file naming convention
archiveName "elasticsearch-${project.version}.${type}"
archiveName "${packageName}-${project.version}.${type}"
destinationDir = file("${type}/build/distributions")
String packagingFiles = "build/packaging/${type}"
String prefix = "${oss ? 'oss-' : ''}${type}"
destinationDir = file("${prefix}/build/distributions")
String packagingFiles = "build/packaging/${prefix}"
String scripts = "${packagingFiles}/scripts"
preInstall file("${scripts}/preinst")
@ -104,7 +111,7 @@ Closure commonPackageConfig(String type) {
// specify it again explicitly for copying common files
into('/usr/share/elasticsearch') {
into('bin') {
with binFiles(type)
with binFiles(type, oss)
}
with copySpec {
with commonFiles
@ -113,6 +120,21 @@ Closure commonPackageConfig(String type) {
exclude 'LICENSE.txt'
}
}
into('modules') {
with copySpec {
with modulesFiles(oss)
// we need to specify every intermediate directory, but modules could have sub directories
// and there might not be any files as direct children of intermediates (eg platform)
// so we must iterate through the parents, but duplicate calls with the same path
// are ok (they don't show up in the built packages)
eachFile { FileCopyDetails fcp ->
String[] segments = fcp.relativePath.segments
for (int i = segments.length - 2; i > 0 && segments[i] != 'modules'; --i) {
directory('/' + segments[0..i].join('/'), 0755)
}
}
}
}
}
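
Tracing the loop above with one assumed path shows which intermediate directories get declared; it walks upward from the file's parent and stops at the 'modules' segment:

def declared = []
String[] segments = 'usr/share/elasticsearch/modules/x-pack/x-pack-core/x-pack-core.jar'.split('/')
for (int i = segments.length - 2; i > 0 && segments[i] != 'modules'; --i) {
    declared.add('/' + segments[0..i].join('/'))
}
assert declared == ['/usr/share/elasticsearch/modules/x-pack/x-pack-core',
                    '/usr/share/elasticsearch/modules/x-pack']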
// ========= config files =========
@ -120,7 +142,7 @@ Closure commonPackageConfig(String type) {
configurationFile '/etc/elasticsearch/jvm.options'
configurationFile '/etc/elasticsearch/log4j2.properties'
into('/etc/elasticsearch') {
//dirMode 0750
dirMode 0750
fileMode 0660
permissionGroup 'elasticsearch'
includeEmptyDirs true
@ -183,7 +205,6 @@ apply plugin: 'nebula.ospackage-base'
// this is package independent configuration
ospackage {
packageName 'elasticsearch'
maintainer 'Elasticsearch Team <info@elastic.co>'
summary '''
Elasticsearch is a distributed RESTful search engine built for the cloud.
@ -212,96 +233,74 @@ ospackage {
into '/usr/share/elasticsearch'
with libFiles
with modulesFiles
with noticeFile
}
task buildDeb(type: Deb) {
dependsOn processDebFiles
configure(commonPackageConfig('deb'))
Closure commonDebConfig(boolean oss) {
return {
configure(commonPackageConfig('deb', oss))
version = project.version
packageGroup 'web'
requires 'bash'
requires 'libc6'
requires 'adduser'
version = project.version
packageGroup 'web'
requires 'bash'
requires 'libc6'
requires 'adduser'
into('/usr/share/lintian/overrides') {
from('src/deb/lintian/elasticsearch')
}
into('/usr/share/doc/elasticsearch') {
from 'src/deb/copyright'
fileMode 0644
}
}
// task that sanity checks if the Deb archive can be extracted
task checkDeb(type: LoggedExec) {
dependsOn buildDeb
onlyIf { new File('/usr/bin/dpkg-deb').exists() || new File('/usr/local/bin/dpkg-deb').exists() }
final File debExtracted = new File("${buildDir}", 'deb-extracted')
commandLine 'dpkg-deb', '-x', "deb/build/distributions/elasticsearch-${project.version}.deb", debExtracted
doFirst {
debExtracted.deleteDir()
}
}
task buildRpm(type: Rpm) {
dependsOn processRpmFiles
configure(commonPackageConfig('rpm'))
packageGroup 'Application/Internet'
requires '/bin/bash'
prefix '/usr'
packager 'Elasticsearch'
version = project.version.replace('-', '_')
release = '1'
arch 'NOARCH'
os 'LINUX'
license '2009'
distribution 'Elasticsearch'
vendor 'Elasticsearch'
// TODO ospackage doesn't support icon but we used to have one
// without this the rpm will have parent dirs of any files we copy in, eg /etc/elasticsearch
addParentDirs false
// Declare the folders so that the RPM package manager removes
// them when upgrading or removing the package
directory('/usr/share/elasticsearch/bin', 0755)
directory('/usr/share/elasticsearch/lib', 0755)
directory('/usr/share/elasticsearch/modules', 0755)
modulesFiles.eachFile { FileCopyDetails fcp ->
if (fcp.name == "plugin-descriptor.properties") {
directory('/usr/share/elasticsearch/modules/' + fcp.file.parentFile.name, 0755)
into('/usr/share/lintian/overrides') {
from('src/deb/lintian/elasticsearch')
}
into('/usr/share/doc/elasticsearch') {
from 'src/deb/copyright'
fileMode 0644
}
}
}
// task that sanity checks if the RPM archive can be extracted
task checkRpm(type: LoggedExec) {
dependsOn buildRpm
onlyIf { new File('/bin/rpm').exists() || new File('/usr/bin/rpm').exists() || new File('/usr/local/bin/rpm').exists() }
final File rpmDatabase = new File("${buildDir}", 'rpm-database')
final File rpmExtracted = new File("${buildDir}", 'rpm-extracted')
commandLine 'rpm',
'--badreloc',
'--nodeps',
'--noscripts',
'--notriggers',
'--dbpath',
rpmDatabase,
'--relocate',
"/=${rpmExtracted}",
'-i',
"rpm/build/distributions/elasticsearch-${project.version}.rpm"
doFirst {
rpmDatabase.deleteDir()
rpmExtracted.deleteDir()
task buildDeb(type: Deb) {
configure(commonDebConfig(false))
}
task buildOssDeb(type: Deb) {
configure(commonDebConfig(true))
}
Closure commonRpmConfig(boolean oss) {
return {
configure(commonPackageConfig('rpm', oss))
packageGroup 'Application/Internet'
requires '/bin/bash'
prefix '/usr'
packager 'Elasticsearch'
version = project.version.replace('-', '_')
release = '1'
arch 'NOARCH'
os 'LINUX'
license '2009'
distribution 'Elasticsearch'
vendor 'Elasticsearch'
// TODO ospackage doesn't support icon but we used to have one
// without this the rpm will have parent dirs of any files we copy in, eg /etc/elasticsearch
addParentDirs false
// Declare the folders so that the RPM package manager removes
// them when upgrading or removing the package
directory('/usr/share/elasticsearch/bin', 0755)
directory('/usr/share/elasticsearch/lib', 0755)
directory('/usr/share/elasticsearch/modules', 0755)
}
}
task buildRpm(type: Rpm) {
configure(commonRpmConfig(false))
}
task buildOssRpm(type: Rpm) {
configure(commonRpmConfig(true))
}
// This configures the default artifact for the distribution specific
// subprojects. We have subprojects because Gradle project substitutions
// can only bind to the default configuration of a project
@ -313,7 +312,39 @@ subprojects {
artifacts {
'default' buildDist
}
// sanity checks if an archive can be extracted
File extractionDir = new File(buildDir, 'extracted')
task testExtraction(type: LoggedExec) {
dependsOn buildDist
doFirst {
project.delete(extractionDir)
extractionDir.mkdirs()
}
}
if (project.name.contains('deb')) {
testExtraction {
onlyIf { new File('/bin/dpkg-deb').exists() || new File('/usr/bin/dpkg-deb').exists() || new File('/usr/local/bin/dpkg-deb').exists() }
Closure debFilter = { f -> f.name.endsWith('.deb') }
commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", extractionDir
}
} else { // rpm
testExtraction {
onlyIf { new File('/bin/rpm').exists() || new File('/usr/bin/rpm').exists() || new File('/usr/local/bin/rpm').exists() }
final File rpmDatabase = new File(extractionDir, 'rpm-database')
final File rpmExtracted = new File(extractionDir, 'rpm-extracted')
commandLine 'rpm',
'--badreloc',
'--nodeps',
'--noscripts',
'--notriggers',
'--dbpath',
rpmDatabase,
'--relocate',
"/=${rpmExtracted}",
'-i',
"${-> buildDist.outputs.files.singleFile}"
}
}
check.dependsOn testExtraction
}
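
The "${-> ...}" placeholders above are lazily evaluated GStrings, so the archive path is resolved only when the command line is actually used, after buildDist has produced its outputs; a small standalone illustration of the idiom:

int resolved = 0
def lazyPath = "dist-${-> resolved}.rpm"
resolved = 42
// the embedded closure runs at toString() time, not at declaration time
assert lazyPath.toString() == 'dist-42.rpm'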
check.dependsOn checkDeb, checkRpm

View File

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

View File

@ -0,0 +1,2 @@
// This file is intentionally blank. All configuration of the
// distribution is done in the parent project.

View File

@ -152,7 +152,6 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
plugins.add(line.trim());
line = reader.readLine();
}
plugins.add("x-pack");
OFFICIAL_PLUGINS = Collections.unmodifiableSet(plugins);
} catch (IOException e) {
throw new RuntimeException(e);

View File

@ -479,6 +479,15 @@ public class InstallPluginCommandTests extends ESTestCase {
assertInstallCleaned(env.v2());
}
public void testBuiltinXpackModule() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
Path pluginDir = createPluginDir(temp);
String pluginZip = createPluginUrl("x-pack", pluginDir);
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
assertTrue(e.getMessage(), e.getMessage().contains("is a system module"));
assertInstallCleaned(env.v2());
}
public void testJarHell() throws Exception {
// jar hell test needs a real filesystem
assumeTrue("real filesystem", isReal);
@ -881,23 +890,10 @@ public class InstallPluginCommandTests extends ESTestCase {
}
}
public void testOfficialPluginsIncludesXpack() throws Exception {
MockTerminal terminal = new MockTerminal();
new InstallPluginCommand() {
@Override
protected boolean addShutdownHook() {
return false;
}
}.main(new String[] { "--help" }, terminal);
assertTrue(terminal.getOutput(), terminal.getOutput().contains("x-pack"));
}
public void testInstallMisspelledOfficialPlugins() throws Exception {
Tuple<Path, Environment> env = createEnv(fs, temp);
UserException e = expectThrows(UserException.class, () -> installPlugin("xpack", env.v1()));
assertThat(e.getMessage(), containsString("Unknown plugin xpack, did you mean [x-pack]?"));
e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1()));
UserException e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1()));
assertThat(e.getMessage(), containsString("Unknown plugin analysis-smartnc, did you mean [analysis-smartcn]?"));
e = expectThrows(UserException.class, () -> installPlugin("repository", env.v1()));

View File

@ -20,6 +20,7 @@
apply plugin: 'elasticsearch.docs-test'
integTestCluster {
distribution = 'oss-zip'
/* Enable regexes in painless so our tests don't complain about example
* snippets that use them. */
setting 'script.painless.regex.enabled', 'true'

View File

@ -4,7 +4,7 @@
:include-xpack: true
:es-test-dir: {docdir}/../src/test
:plugins-examples-dir: {docdir}/../../plugins/examples
:xes-repo-dir: {docdir}/../../../elasticsearch-extra/x-pack-elasticsearch/docs/{lang}
:xes-repo-dir: {docdir}/../../x-pack/docs/{lang}
:es-repo-dir: {docdir}

View File

@ -38,6 +38,7 @@ task setupSeedNodeAndUnicastHostsFile(type: DefaultTask) {
// setup the initial cluster with one node that will serve as the seed node
// for unicast discovery
ClusterConfiguration config = new ClusterConfiguration(project)
config.distribution = 'integ-test-zip'
config.clusterName = 'discovery-file-test-cluster'
List<NodeInfo> nodes = ClusterFormationTasks.setup(project, 'initialCluster', setupSeedNodeAndUnicastHostsFile, config)
File srcUnicastHostsFile = file('build/cluster/unicast_hosts.txt')

View File

@ -153,6 +153,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
project.afterEvaluate {
for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
ClusterConfiguration cluster = project.extensions.getByName("${integTestTaskName}Cluster") as ClusterConfiguration
cluster.distribution = 'integ-test-zip'
cluster.dependsOn(project.bundlePlugin)
Task restIntegTestTask = project.tasks.getByName(integTestTaskName)

View File

@ -0,0 +1,10 @@
import org.elasticsearch.gradle.test.RestIntegTestTask
subprojects { Project subproj ->
subproj.tasks.withType(RestIntegTestTask) {
subproj.extensions.configure("${it.name}Cluster") { cluster ->
cluster.distribution = 'oss-zip'
}
}
}

View File

@ -91,7 +91,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
"has.native.controller", "false");
try (Spawner spawner = new Spawner()) {
spawner.spawnNativePluginControllers(environment);
spawner.spawnNativeControllers(environment);
assertThat(spawner.getProcesses(), hasSize(0));
}
}
@ -149,7 +149,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
"has.native.controller", "false");
Spawner spawner = new Spawner();
spawner.spawnNativePluginControllers(environment);
spawner.spawnNativeControllers(environment);
List<Process> processes = spawner.getProcesses();
/*
@ -228,7 +228,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
"has.native.controller", "false");
Spawner spawner = new Spawner();
spawner.spawnNativePluginControllers(environment);
spawner.spawnNativeControllers(environment);
List<Process> processes = spawner.getProcesses();
/*
@ -277,7 +277,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
Spawner spawner = new Spawner();
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> spawner.spawnNativePluginControllers(environment));
() -> spawner.spawnNativeControllers(environment));
assertThat(
e.getMessage(),
equalTo("plugin [test_plugin] does not have permission to fork native controller"));
@ -298,11 +298,11 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
final Spawner spawner = new Spawner();
if (Constants.MAC_OS_X) {
// if the spawner were not skipping the Desktop Services Store files on macOS this would explode
spawner.spawnNativePluginControllers(environment);
spawner.spawnNativeControllers(environment);
} else {
// we do not ignore these files on non-macOS systems
final FileSystemException e =
expectThrows(FileSystemException.class, () -> spawner.spawnNativePluginControllers(environment));
expectThrows(FileSystemException.class, () -> spawner.spawnNativeControllers(environment));
if (Constants.WINDOWS) {
assertThat(e, instanceOf(NoSuchFileException.class));
} else {

View File

@ -46,7 +46,7 @@ setup() {
}
@test "[DEB] package depends on bash" {
dpkg -I elasticsearch-$(cat version).deb | grep "Depends:.*bash.*"
dpkg -I elasticsearch-oss-$(cat version).deb | grep "Depends:.*bash.*"
}
##################################
@ -58,21 +58,21 @@ setup() {
}
@test "[DEB] package is available" {
count=$(ls elasticsearch-$(cat version).deb | wc -l)
count=$(ls elasticsearch-oss-$(cat version).deb | wc -l)
[ "$count" -eq 1 ]
}
@test "[DEB] package is not installed" {
run dpkg -s 'elasticsearch'
run dpkg -s 'elasticsearch-oss'
[ "$status" -eq 1 ]
}
@test "[DEB] install package" {
dpkg -i elasticsearch-$(cat version).deb
dpkg -i elasticsearch-oss-$(cat version).deb
}
@test "[DEB] package is installed" {
dpkg -s 'elasticsearch'
dpkg -s 'elasticsearch-oss'
}
@test "[DEB] verify package installation" {
@ -109,11 +109,11 @@ setup() {
# Uninstall DEB package
##################################
@test "[DEB] remove package" {
dpkg -r 'elasticsearch'
dpkg -r 'elasticsearch-oss'
}
@test "[DEB] package has been removed" {
run dpkg -s 'elasticsearch'
run dpkg -s 'elasticsearch-oss'
[ "$status" -eq 0 ]
echo "$output" | grep -i "status" | grep -i "deinstall ok"
}
@ -167,7 +167,7 @@ setup() {
@test "[DEB] purge package" {
# User installed scripts aren't removed so we'll just get them ourselves
rm -rf $ESSCRIPTS
dpkg --purge 'elasticsearch'
dpkg --purge 'elasticsearch-oss'
}
@test "[DEB] verify package purge" {
@ -191,16 +191,16 @@ setup() {
}
@test "[DEB] package has been completly removed" {
run dpkg -s 'elasticsearch'
run dpkg -s 'elasticsearch-oss'
[ "$status" -eq 1 ]
}
@test "[DEB] reinstall package" {
dpkg -i elasticsearch-$(cat version).deb
dpkg -i elasticsearch-oss-$(cat version).deb
}
@test "[DEB] package is installed by reinstall" {
dpkg -s 'elasticsearch'
dpkg -s 'elasticsearch-oss'
}
@test "[DEB] verify package reinstallation" {
@ -208,10 +208,10 @@ setup() {
}
@test "[DEB] repurge package" {
dpkg --purge 'elasticsearch'
dpkg --purge 'elasticsearch-oss'
}
@test "[DEB] package has been completly removed again" {
run dpkg -s 'elasticsearch'
run dpkg -s 'elasticsearch-oss'
[ "$status" -eq 1 ]
}

View File

@ -45,7 +45,7 @@ setup() {
}
@test "[RPM] package depends on bash" {
rpm -qpR elasticsearch-$(cat version).rpm | grep '/bin/bash'
rpm -qpR elasticsearch-oss-$(cat version).rpm | grep '/bin/bash'
}
##################################
@ -57,21 +57,21 @@ setup() {
}
@test "[RPM] package is available" {
count=$(ls elasticsearch-$(cat version).rpm | wc -l)
count=$(ls elasticsearch-oss-$(cat version).rpm | wc -l)
[ "$count" -eq 1 ]
}
@test "[RPM] package is not installed" {
run rpm -qe 'elasticsearch'
run rpm -qe 'elasticsearch-oss'
[ "$status" -eq 1 ]
}
@test "[RPM] install package" {
rpm -i elasticsearch-$(cat version).rpm
rpm -i elasticsearch-oss-$(cat version).rpm
}
@test "[RPM] package is installed" {
rpm -qe 'elasticsearch'
rpm -qe 'elasticsearch-oss'
}
@test "[RPM] verify package installation" {
@ -103,11 +103,11 @@ setup() {
@test "[RPM] remove package" {
# User installed scripts aren't removed so we'll just get them ourselves
rm -rf $ESSCRIPTS
rpm -e 'elasticsearch'
rpm -e 'elasticsearch-oss'
}
@test "[RPM] package has been removed" {
run rpm -qe 'elasticsearch'
run rpm -qe 'elasticsearch-oss'
[ "$status" -eq 1 ]
}
@ -143,11 +143,11 @@ setup() {
}
@test "[RPM] reinstall package" {
rpm -i elasticsearch-$(cat version).rpm
rpm -i elasticsearch-oss-$(cat version).rpm
}
@test "[RPM] package is installed by reinstall" {
rpm -qe 'elasticsearch'
rpm -qe 'elasticsearch-oss'
}
@test "[RPM] verify package reinstallation" {
@ -159,7 +159,7 @@ setup() {
echo "# ping" >> "/etc/elasticsearch/elasticsearch.yml"
echo "# ping" >> "/etc/elasticsearch/jvm.options"
echo "# ping" >> "/etc/elasticsearch/log4j2.properties"
rpm -e 'elasticsearch'
rpm -e 'elasticsearch-oss'
}
@test "[RPM] verify preservation" {
@ -202,6 +202,6 @@ setup() {
}
@test "[RPM] package has been removed again" {
run rpm -qe 'elasticsearch'
run rpm -qe 'elasticsearch-oss'
[ "$status" -eq 1 ]
}

View File

@ -47,6 +47,12 @@ setup() {
if [ "$(cat upgrade_from_version)" == "$(cat version)" ]; then
sameVersion="true"
fi
# TODO: this needs to conditionally change based on version > 6.3.0
if [ -f upgrade_is_oss ]; then
export PACKAGE_NAME="elasticsearch-oss"
else
skip "upgrade cannot happen from pre 6.3.0 to elasticsearch-oss"
fi
}
@test "[UPGRADE] install old version" {

View File

@ -42,6 +42,7 @@ load $BATS_UTILS/packages.bash
# Cleans everything for the 1st execution
setup() {
skip_not_dpkg_or_rpm
export PACKAGE_NAME="elasticsearch-oss"
}
@test "[REINSTALL] install" {

View File

@ -46,6 +46,7 @@ export_elasticsearch_paths() {
if is_rpm; then
export ESENVFILE="/etc/sysconfig/elasticsearch"
fi
export PACKAGE_NAME=${PACKAGE_NAME:-"elasticsearch-oss"}
}
# Install the rpm or deb package.
@ -73,9 +74,9 @@ install_package() {
esac
done
if is_rpm; then
rpm $rpmCommand elasticsearch-$version.rpm
rpm $rpmCommand $PACKAGE_NAME-$version.rpm
elif is_dpkg; then
dpkg $dpkgCommand -i elasticsearch-$version.deb
dpkg $dpkgCommand -i $PACKAGE_NAME-$version.deb
else
skip "Only rpm or deb supported"
fi

View File

@ -288,20 +288,20 @@ clean_before_test() {
purge_elasticsearch() {
# Removes RPM package
if is_rpm; then
rpm --quiet -e elasticsearch > /dev/null 2>&1 || true
rpm --quiet -e $PACKAGE_NAME > /dev/null 2>&1 || true
fi
if [ -x "`which yum 2>/dev/null`" ]; then
yum remove -y elasticsearch > /dev/null 2>&1 || true
yum remove -y $PACKAGE_NAME > /dev/null 2>&1 || true
fi
# Removes DEB package
if is_dpkg; then
dpkg --purge elasticsearch > /dev/null 2>&1 || true
dpkg --purge $PACKAGE_NAME > /dev/null 2>&1 || true
fi
if [ -x "`which apt-get 2>/dev/null`" ]; then
apt-get --quiet --yes purge elasticsearch > /dev/null 2>&1 || true
apt-get --quiet --yes purge $PACKAGE_NAME > /dev/null 2>&1 || true
fi
}

View File

@ -163,6 +163,7 @@ forbiddenPatterns {
task generateModulesList {
List<String> modules = project(':modules').subprojects.collect { it.name }
modules.add('x-pack')
File modulesFile = new File(buildDir, 'generated-resources/modules.txt')
processResources.from(modulesFile)
inputs.property('modules', modules)

View File

@ -162,7 +162,7 @@ final class Bootstrap {
Settings settings = environment.settings();
try {
spawner.spawnNativePluginControllers(environment);
spawner.spawnNativeControllers(environment);
} catch (IOException e) {
throw new BootstrapException(e);
}

View File

@ -54,12 +54,12 @@ final class Spawner implements Closeable {
}
/**
* Spawns the native controllers for each plugin
* Spawns the native controllers for each plugin/module.
*
* @param environment the node environment
* @throws IOException if an I/O error occurs reading the plugins or spawning a native process
*/
void spawnNativePluginControllers(final Environment environment) throws IOException {
void spawnNativeControllers(final Environment environment) throws IOException {
if (!spawned.compareAndSet(false, true)) {
throw new IllegalStateException("native controllers already spawned");
}

View File

@ -16,9 +16,13 @@ List projects = [
'client:benchmark',
'benchmarks',
'distribution:archives:integ-test-zip',
'distribution:archives:oss-zip',
'distribution:archives:zip',
'distribution:archives:oss-tar',
'distribution:archives:tar',
'distribution:packages:oss-deb',
'distribution:packages:deb',
'distribution:packages:oss-rpm',
'distribution:packages:rpm',
'distribution:bwc:next-minor-snapshot',
'distribution:bwc:staged-minor-snapshot',

View File

@ -75,4 +75,6 @@ subprojects {
String snapshotProject = ":x-pack:plugin:bwc:${snapshotName}"
project(snapshotProject).ext.bwcVersion = snapshot
ext.projectSubstitutions["org.elasticsearch.plugin:x-pack:${snapshot}"] = snapshotProject
}
}
}

View File

@ -133,8 +133,12 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
return tmpFile.exists()
}
// copy xpack rest api
File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources')
project.copyRestSpec.from(xpackResources) {
include 'rest-api-spec/api/**'
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.security.authc.token.enabled', 'true'
// Disable monitoring exporters for the docs tests
@ -142,7 +146,7 @@ integTestCluster {
setting 'xpack.monitoring.exporters._local.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupTestAdmin',
'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = waitWithAuth
}

View File

@ -105,7 +105,7 @@ integTestCluster {
keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
distribution = 'zip' // this is important since we use the reindex module in ML
setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupTestUser', 'bin/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
extraConfigFile nodeKeystore.name, nodeKeystore

View File

@ -4,4 +4,5 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/plugins/x-pack/x-pack-core/*"
# include x-pack-core jars in classpath
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/modules/x-pack/x-pack-core/*"

View File

@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core;
public final class XPackField {
// These should be moved back to XPackPlugin once its moved to common
public static final String NAME = "x-pack";
/** Name constant for the security feature. */
public static final String SECURITY = "security";
/** Name constant for the monitoring feature. */

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.bouncycastle.operator.OperatorCreationException;
import org.elasticsearch.SpecialPermission;
@ -22,6 +23,8 @@ import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
@ -51,6 +54,7 @@ import org.elasticsearch.xpack.core.ssl.SSLService;
import javax.security.auth.DestroyFailedException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.GeneralSecurityException;
@ -63,6 +67,9 @@ import java.util.function.Supplier;
public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, ExtensiblePlugin {
private static Logger logger = ESLoggerFactory.getLogger(XPackPlugin.class);
private static DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
// TODO: clean up this library to not ask for write access to all system properties!
static {
// invoke this clinit in unbound with permissions to access all system properties
@ -216,11 +223,15 @@ public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, Exte
}
public static Path resolveConfigFile(Environment env, String name) {
return env.configFile().resolve(XPackField.NAME).resolve(name);
Path config = env.configFile().resolve(name);
if (Files.exists(config) == false) {
Path legacyConfig = env.configFile().resolve("x-pack").resolve(name);
if (Files.exists(legacyConfig)) {
deprecationLogger.deprecated("Config file [" + name + "] is in a deprecated location. Move from " +
legacyConfig.toString() + " to " + config.toString());
return legacyConfig;
}
}
return config;
}
public static Path resolveXPackExtensionsFile(Environment env) {
return env.pluginsFile().resolve(XPackField.NAME).resolve("x-pack-security").resolve("extensions");
}
}

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-security-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-security-env.bat" || exit /b 1

View File

@ -7,4 +7,4 @@
source "`dirname "$0"`"/x-pack-env
# include x-pack-security jars in classpath
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/plugins/x-pack/x-pack-security/*"
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/modules/x-pack/x-pack-security/*"

View File

@ -104,7 +104,8 @@ public class FileUserPasswdStore {
*/
static Map<String, char[]> parseFileLenient(Path path, Logger logger, Settings settings) {
try {
return parseFile(path, logger, settings);
Map<String, char[]> map = parseFile(path, logger, settings);
return map == null ? emptyMap() : map;
} catch (Exception e) {
logger.error(
(Supplier<?>) () -> new ParameterizedMessage(
@ -114,8 +115,9 @@ public class FileUserPasswdStore {
}
/**
* parses the users file. Should never return {@code null}, if the file doesn't exist an
* empty map is returned
* Parses the users file.
*
* Returns {@code null} if the {@code users} file does not exist.
*/
public static Map<String, char[]> parseFile(Path path, @Nullable Logger logger, Settings settings) {
if (logger == null) {
@ -123,8 +125,8 @@ public class FileUserPasswdStore {
}
logger.trace("reading users file [{}]...", path.toAbsolutePath());
if (!Files.exists(path)) {
return emptyMap();
if (Files.exists(path) == false) {
return null;
}
List<String> lines;
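The change above splits the parsing contract in two: the strict parseFile now returns null when the users file is absent, so callers can tell a missing file apart from an empty one, while parseFileLenient maps that null back to an empty map. A small sketch of the same split, with parseUsersFile standing in for the real FileUserPasswdStore.parseFile (the class and method names here are illustrative only, and the actual parsing is elided):

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Map;

final class LenientParsing {
    static Map<String, char[]> parseUsersFile(Path path) {
        if (Files.exists(path) == false) {
            return null;                      // strict contract: null means "file is missing"
        }
        return Collections.emptyMap();        // real line-by-line parsing elided in this sketch
    }

    static Map<String, char[]> parseUsersFileLenient(Path path) {
        Map<String, char[]> users = parseUsersFile(path);
        return users == null ? Collections.emptyMap() : users;  // lenient callers never see null
    }
}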

View File

@ -92,7 +92,8 @@ public class FileUserRolesStore {
*/
static Map<String, String[]> parseFileLenient(Path path, Logger logger) {
try {
return parseFile(path, logger);
Map<String, String[]> map = parseFile(path, logger);
return map == null ? emptyMap() : map;
} catch (Exception e) {
logger.error(
(Supplier<?>) () -> new ParameterizedMessage("failed to parse users_roles file [{}]. skipping/removing all entries...",
@ -103,9 +104,10 @@ public class FileUserRolesStore {
}
/**
* parses the users_roles file. Should never return {@code null}, if the file doesn't exist
* an empty map is returned. The read file holds a mapping per line of the form "role -&gt; users" while the returned
* map holds entries of the form "user -&gt; roles".
* Parses the users_roles file.
*
* Returns {@code null} if the {@code users_roles} file does not exist. The read file holds a mapping per
* line of the form "role -&gt; users" while the returned map holds entries of the form "user -&gt; roles".
*/
public static Map<String, String[]> parseFile(Path path, @Nullable Logger logger) {
if (logger == null) {
@ -113,9 +115,8 @@ public class FileUserRolesStore {
}
logger.trace("reading users_roles file [{}]...", path.toAbsolutePath());
if (!Files.exists(path)) {
return emptyMap();
if (Files.exists(path) == false) {
return null;
}
List<String> lines;

View File

@ -75,7 +75,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
return new ListCommand();
}
static class AddUserCommand extends XPackConfigurationAwareCommand {
static class AddUserCommand extends EnvironmentAwareCommand {
private final OptionSpec<String> passwordOption;
private final OptionSpec<String> rolesOption;
@ -105,7 +105,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
String username = parseUsername(arguments.values(options), env.settings());
final boolean allowReserved = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(env.settings()) == false;
@ -121,11 +121,15 @@ public class UsersTool extends LoggingAwareMultiCommand {
Path rolesFile = FileUserRolesStore.resolveFile(env);
FileAttributesChecker attributesChecker = new FileAttributesChecker(passwordFile, rolesFile);
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null, env.settings()));
Map<String, char[]> users = FileUserPasswdStore.parseFile(passwordFile, null, env.settings());
if (users == null) {
throw new UserException(ExitCodes.CONFIG, "Configuration file [users] is missing");
}
if (users.containsKey(username)) {
throw new UserException(ExitCodes.CODE_ERROR, "User [" + username + "] already exists");
}
Hasher hasher = Hasher.BCRYPT;
users = new HashMap<>(users); // make modifiable
users.put(username, hasher.hash(new SecureString(password)));
FileUserPasswdStore.writeFile(users, passwordFile);
@ -139,7 +143,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
}
static class DeleteUserCommand extends XPackConfigurationAwareCommand {
static class DeleteUserCommand extends EnvironmentAwareCommand {
private final OptionSpec<String> arguments;
@ -160,18 +164,22 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
String username = parseUsername(arguments.values(options), env.settings());
Path passwordFile = FileUserPasswdStore.resolveFile(env);
Path rolesFile = FileUserRolesStore.resolveFile(env);
FileAttributesChecker attributesChecker = new FileAttributesChecker(passwordFile, rolesFile);
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(passwordFile, null, env.settings()));
Map<String, char[]> users = FileUserPasswdStore.parseFile(passwordFile, null, env.settings());
if (users == null) {
throw new UserException(ExitCodes.CONFIG, "Configuration file [users] is missing");
}
if (users.containsKey(username) == false) {
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
}
if (Files.exists(passwordFile)) {
users = new HashMap<>(users);
char[] passwd = users.remove(username);
if (passwd != null) {
FileUserPasswdStore.writeFile(users, passwordFile);
@ -190,7 +198,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
}
static class PasswordCommand extends XPackConfigurationAwareCommand {
static class PasswordCommand extends EnvironmentAwareCommand {
private final OptionSpec<String> passwordOption;
private final OptionSpec<String> arguments;
@ -215,7 +223,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
String username = parseUsername(arguments.values(options), env.settings());
char[] password = parsePassword(terminal, passwordOption.value(options));
@ -223,6 +231,9 @@ public class UsersTool extends LoggingAwareMultiCommand {
Path file = FileUserPasswdStore.resolveFile(env);
FileAttributesChecker attributesChecker = new FileAttributesChecker(file);
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(file, null, env.settings()));
if (users == null) {
throw new UserException(ExitCodes.CONFIG, "Configuration file [users] is missing");
}
if (users.containsKey(username) == false) {
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
}
@ -233,7 +244,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
}
static class RolesCommand extends XPackConfigurationAwareCommand {
static class RolesCommand extends EnvironmentAwareCommand {
private final OptionSpec<String> addOption;
private final OptionSpec<String> removeOption;
@ -259,7 +270,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
String username = parseUsername(arguments.values(options), env.settings());
String[] addRoles = parseRoles(terminal, env, addOption.value(options));
@ -303,7 +314,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
}
static class ListCommand extends XPackConfigurationAwareCommand {
static class ListCommand extends EnvironmentAwareCommand {
private final OptionSpec<String> arguments;
@ -319,7 +330,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
}
@Override
protected void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception {
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
String username = null;
if (options.has(arguments)) {
@ -333,15 +344,24 @@ public class UsersTool extends LoggingAwareMultiCommand {
static void listUsersAndRoles(Terminal terminal, Environment env, String username) throws Exception {
Path userRolesFilePath = FileUserRolesStore.resolveFile(env);
Map<String, String[]> userRoles = FileUserRolesStore.parseFile(userRolesFilePath, null);
if (userRoles == null) {
throw new UserException(ExitCodes.CONFIG, "Configuration file [users_roles] is missing");
}
Path userFilePath = FileUserPasswdStore.resolveFile(env);
Set<String> users = FileUserPasswdStore.parseFile(userFilePath, null, env.settings()).keySet();
Map<String, char[]> users = FileUserPasswdStore.parseFile(userFilePath, null, env.settings());
if (users == null) {
throw new UserException(ExitCodes.CONFIG, "Configuration file [users] is missing");
}
Path rolesFilePath = FileRolesStore.resolveFile(env);
Set<String> knownRoles = Sets.union(FileRolesStore.parseFileForRoleNames(rolesFilePath, null), ReservedRolesStore.names());
if (knownRoles == null) {
throw new UserException(ExitCodes.CONFIG, "Configuration file [roles.xml] is missing");
}
if (username != null) {
if (!users.contains(username)) {
if (!users.containsKey(username)) {
throw new UserException(ExitCodes.NO_USER, "User [" + username + "] doesn't exist");
}
@ -373,7 +393,7 @@ public class UsersTool extends LoggingAwareMultiCommand {
usersExist = true;
}
// list users without roles
Set<String> usersWithoutRoles = Sets.newHashSet(users);
Set<String> usersWithoutRoles = Sets.newHashSet(users.keySet());
usersWithoutRoles.removeAll(userRoles.keySet());
for (String user : usersWithoutRoles) {
terminal.println(String.format(Locale.ROOT, "%-15s: -", user));
@ -480,35 +500,4 @@ public class UsersTool extends LoggingAwareMultiCommand {
return roles;
}
private abstract static class XPackConfigurationAwareCommand extends EnvironmentAwareCommand {
XPackConfigurationAwareCommand(final String description) {
super(description);
}
@Override
protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
checkConfigurationDir(env);
executeCommand(terminal, options, env);
}
/**
* Ensure the X-Pack configuration directory exists as a child of $ES_CONF_DIR or return a helpful error message.
*/
private void checkConfigurationDir(Environment env) throws Exception {
Path configDir = env.configFile().resolve(XPackField.NAME);
if (Files.exists(configDir) == false) {
throw new UserException(ExitCodes.CONFIG, String.format(Locale.ROOT,
"Directory %s does not exist. Please ensure " +
"that %s is the configuration directory for Elasticsearch and create directory %s/x-pack manually",
configDir.toString(),
configDir.getParent().toString(),
configDir.toString()));
}
}
protected abstract void executeCommand(Terminal terminal, OptionSet options, Environment env) throws Exception;
}
}
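With the XPackConfigurationAwareCommand wrapper removed, the commands above extend EnvironmentAwareCommand directly and fail fast with a CONFIG exit code when the users or users_roles file is missing, instead of requiring a config/x-pack directory to exist. A minimal sketch of that fail-fast check, where ConfigMissingException is only an illustrative stand-in for UserException with ExitCodes.CONFIG:

import java.util.Map;

final class RequireConfig {
    static class ConfigMissingException extends RuntimeException {
        ConfigMissingException(String message) { super(message); }
    }

    static <K, V> Map<K, V> requirePresent(Map<K, V> parsed, String fileName) {
        if (parsed == null) {                 // null comes from the strict parseFile contract above
            throw new ConfigMissingException("Configuration file [" + fileName + "] is missing");
        }
        return parsed;
    }
}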

View File

@ -17,6 +17,7 @@ import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackPlugin;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
@ -68,7 +69,7 @@ public class SystemKeyTool extends EnvironmentAwareCommand {
}
keyPath = parsePath(args.get(0));
} else {
keyPath = env.configFile().resolve(XPackField.NAME).resolve("system_key");
keyPath = XPackPlugin.resolveConfigFile(env, "system_key");
}
// write the key

View File

@ -112,7 +112,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
@Override
public Settings nodeSettings(int nodeOrdinal) {
final Path home = nodePath(nodeOrdinal);
final Path xpackConf = home.resolve("config").resolve(XPackField.NAME);
final Path xpackConf = home.resolve("config");
try {
Files.createDirectories(xpackConf);
} catch (IOException e) {

View File

@ -225,7 +225,7 @@ public class RealmSettingsTests extends ESTestCase {
private Settings.Builder configureSsl(String prefix, Settings.Builder builder, boolean useKeyStore, boolean useTrustStore) {
if (useKeyStore) {
builder.put(prefix + "keystore.path", "x-pack/ssl/" + randomAlphaOfLength(5) + ".jks");
builder.put(prefix + "keystore.path", "ssl/" + randomAlphaOfLength(5) + ".jks");
SecuritySettingsSource.addSecureSettings(builder, secureSettings -> {
secureSettings.setString(prefix + "keystore.secure_password", randomAlphaOfLength(8));
secureSettings.setString(prefix + "keystore.secure_key_password", randomAlphaOfLength(8));
@ -235,7 +235,7 @@ public class RealmSettingsTests extends ESTestCase {
SecuritySettingsSource.addSecureSettings(builder, secureSettings ->
secureSettings.setString(prefix + "secure_key_passphrase", randomAlphaOfLength(32)));
builder.put(prefix + "certificate", "x-pack/ssl/" + randomAlphaOfLength(5) + ".cert");
builder.put(prefix + "certificate", "ssl/" + randomAlphaOfLength(5) + ".cert");
}
if (useTrustStore) {
@ -243,7 +243,7 @@ public class RealmSettingsTests extends ESTestCase {
SecuritySettingsSource.addSecureSettings(builder, secureSettings ->
secureSettings.setString(prefix + "truststore.secure_password", randomAlphaOfLength(8)));
} else {
builder.put(prefix + "certificate_authorities", "x-pack/ssl/" + randomAlphaOfLength(8) + ".ca");
builder.put(prefix + "certificate_authorities", "ssl/" + randomAlphaOfLength(8) + ".ca");
}
builder.put(prefix + "verification_mode", "full");

View File

@ -118,7 +118,7 @@ public class ESNativeRealmMigrateToolTests extends CommandTestCase {
public void testMissingFiles() throws Exception {
Path homeDir = createTempDir();
Path confDir = homeDir.resolve("config");
Path xpackConfDir = confDir.resolve("x-pack");
Path xpackConfDir = confDir;
Files.createDirectories(xpackConfDir);
ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles();

View File

@ -15,7 +15,6 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.security.audit.logfile.CapturingLogger;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
@ -43,6 +42,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
public class FileUserPasswdStoreTests extends ESTestCase {
@ -66,7 +66,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
}
public void testStore_ConfiguredWithUnreadableFile() throws Exception {
Path xpackConf = env.configFile().resolve(XPackField.NAME);
Path xpackConf = env.configFile();
Files.createDirectories(xpackConf);
Path file = xpackConf.resolve("users");
@ -82,7 +82,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
public void testStore_AutoReload() throws Exception {
Path users = getDataPath("users");
Path xpackConf = env.configFile().resolve(XPackField.NAME);
Path xpackConf = env.configFile();
Files.createDirectories(xpackConf);
Path file = xpackConf.resolve("users");
Files.copy(users, file, StandardCopyOption.REPLACE_EXISTING);
@ -119,7 +119,7 @@ public class FileUserPasswdStoreTests extends ESTestCase {
public void testStore_AutoReload_WithParseFailures() throws Exception {
Path users = getDataPath("users");
Path xpackConf = env.configFile().resolve(XPackField.NAME);
Path xpackConf = env.configFile();
Files.createDirectories(xpackConf);
Path testUsers = xpackConf.resolve("users");
Files.copy(users, testUsers, StandardCopyOption.REPLACE_EXISTING);
@ -184,6 +184,8 @@ public class FileUserPasswdStoreTests extends ESTestCase {
Path file = createTempDir().resolve(randomAlphaOfLength(10));
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<String, char[]> users = FileUserPasswdStore.parseFile(file, logger, Settings.EMPTY);
assertThat(users, nullValue());
users = FileUserPasswdStore.parseFileLenient(file, logger, Settings.EMPTY);
assertThat(users, notNullValue());
assertThat(users.isEmpty(), is(true));
}

View File

@ -44,6 +44,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
public class FileUserRolesStoreTests extends ESTestCase {
@ -186,6 +187,8 @@ public class FileUserRolesStoreTests extends ESTestCase {
Path file = createTempDir().resolve(randomAlphaOfLength(10));
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
Map<String, String[]> usersRoles = FileUserRolesStore.parseFile(file, logger);
assertThat(usersRoles, nullValue());
usersRoles = FileUserRolesStore.parseFileLenient(file, logger);
assertThat(usersRoles, notNullValue());
assertThat(usersRoles.isEmpty(), is(true));
}
@ -270,7 +273,7 @@ public class FileUserRolesStoreTests extends ESTestCase {
}
private Path getUsersRolesPath() throws IOException {
Path xpackConf = env.configFile().resolve(XPackField.NAME);
Path xpackConf = env.configFile();
Files.createDirectories(xpackConf);
return xpackConf.resolve("users_roles");
}

View File

@ -302,7 +302,7 @@ public class FileRolesStoreTests extends ESTestCase {
try {
Path roles = getDataPath("roles.yml");
Path home = createTempDir();
Path xpackConf = home.resolve("config").resolve(XPackField.NAME);
Path xpackConf = home.resolve("config");
Files.createDirectories(xpackConf);
Path tmp = xpackConf.resolve("roles.yml");
try (OutputStream stream = Files.newOutputStream(tmp)) {
@ -430,7 +430,7 @@ public class FileRolesStoreTests extends ESTestCase {
public void testUsageStats() throws Exception {
Path roles = getDataPath("roles.yml");
Path home = createTempDir();
Path tmp = home.resolve("config/x-pack/roles.yml");
Path tmp = home.resolve("config/roles.yml");
Files.createDirectories(tmp.getParent());
try (OutputStream stream = Files.newOutputStream(tmp)) {
Files.copy(roles, stream);

View File

@ -95,7 +95,7 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase
public void testThatConnectionToServerTypeConnectionWorks() throws IOException, NodeValidationException {
Path home = createTempDir();
Path xpackConf = home.resolve("config").resolve(XPackField.NAME);
Path xpackConf = home.resolve("config");
Files.createDirectories(xpackConf);
Transport transport = internalCluster().getDataNodeInstance(Transport.class);
@ -127,7 +127,7 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase
public void testThatConnectionToClientTypeConnectionIsRejected() throws IOException, NodeValidationException, InterruptedException {
Path home = createTempDir();
Path xpackConf = home.resolve("config").resolve(XPackField.NAME);
Path xpackConf = home.resolve("config");
Files.createDirectories(xpackConf);
writeFile(xpackConf, "users", configUsers());
writeFile(xpackConf, "users_roles", configUsersRoles());

View File

@ -4,11 +4,11 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-env
CLI_JAR=$(ls $ES_HOME/bin/x-pack/sql-cli-*.jar)
CLI_JAR=$(ls $ES_HOME/bin/sql-cli-*.jar)
exec \
"$JAVA" \

View File

@ -7,11 +7,11 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-env.bat" || exit /b 1
set CLI_JAR=%ES_HOME%/plugins/x-pack/bin/*
set CLI_JAR=%ES_HOME%/plugins/bin/*
%JAVA% ^
-cp "%CLI_JAR%" ^

View File

@ -8,14 +8,14 @@
- do:
nodes.info: {}
- match: { nodes.$master.plugins.0.name: x-pack-core }
- match: { nodes.$master.plugins.1.name: x-pack-deprecation }
- match: { nodes.$master.plugins.2.name: x-pack-graph }
- match: { nodes.$master.plugins.3.name: x-pack-logstash }
- match: { nodes.$master.plugins.4.name: x-pack-ml }
- match: { nodes.$master.plugins.5.name: x-pack-monitoring }
- match: { nodes.$master.plugins.6.name: x-pack-rollup }
- match: { nodes.$master.plugins.7.name: x-pack-security }
- match: { nodes.$master.plugins.8.name: x-pack-sql }
- match: { nodes.$master.plugins.9.name: x-pack-upgrade }
- match: { nodes.$master.plugins.10.name: x-pack-watcher }
- match: { nodes.$master.modules.13.name: x-pack-core }
- match: { nodes.$master.modules.14.name: x-pack-deprecation }
- match: { nodes.$master.modules.15.name: x-pack-graph }
- match: { nodes.$master.modules.16.name: x-pack-logstash }
- match: { nodes.$master.modules.17.name: x-pack-ml }
- match: { nodes.$master.modules.18.name: x-pack-monitoring }
- match: { nodes.$master.modules.19.name: x-pack-rollup }
- match: { nodes.$master.modules.20.name: x-pack-security }
- match: { nodes.$master.modules.21.name: x-pack-sql }
- match: { nodes.$master.modules.22.name: x-pack-upgrade }
- match: { nodes.$master.modules.23.name: x-pack-watcher }

View File

@ -4,7 +4,7 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
source "`dirname "$0"`"/../elasticsearch-env
source "`dirname "$0"`"/elasticsearch-env
source "`dirname "$0"`"/x-pack-watcher-env

View File

@ -7,7 +7,7 @@ rem you may not use this file except in compliance with the Elastic License.
setlocal enabledelayedexpansion
setlocal enableextensions
call "%~dp0..\elasticsearch-env.bat" || exit /b 1
call "%~dp0elasticsearch-env.bat" || exit /b 1
call "%~dp0x-pack-watcher-env.bat" || exit /b 1

View File

@ -7,4 +7,4 @@
source "`dirname "$0"`"/x-pack-env
# include x-pack-security jars in classpath
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/plugins/x-pack/x-pack-watcher/*"
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/modules/x-pack/x-pack-watcher/*"

View File

@ -4,4 +4,4 @@ rem you may not use this file except in compliance with the Elastic License.
call "%~dp0x-pack-env.bat" || exit /b 1
set ES_CLASSPATH=!ES_CLASSPATH!;!ES_HOME!/plugins/x-pack/x-pack-watcher/*
set ES_CLASSPATH=!ES_CLASSPATH!;!ES_HOME!/modules/x-pack/x-pack-watcher/*

View File

@ -9,6 +9,7 @@ import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.watcher.WatcherField;
import java.nio.file.Files;
@ -26,7 +27,7 @@ final class EncryptSensitiveDataBootstrapCheck implements BootstrapCheck {
public BootstrapCheckResult check(BootstrapContext context) {
if (Watcher.ENCRYPT_SENSITIVE_DATA_SETTING.get(context.settings)
&& WatcherField.ENCRYPTION_KEY_SETTING.exists(context.settings) == false) {
final Path systemKeyPath = environment.configFile().resolve(XPackField.NAME).resolve("system_key").toAbsolutePath();
final Path systemKeyPath = XPackPlugin.resolveConfigFile(environment, "system_key").toAbsolutePath();
final String message;
if (Files.exists(systemKeyPath)) {
message = "Encryption of sensitive data requires the key to be placed in the secure setting store. Run " +

View File

@ -17,7 +17,6 @@ project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps)
integTestCluster {
distribution 'zip'
plugin xpackProject('plugin').path
setting 'xpack.ml.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.security.enabled', 'true'
@ -26,7 +25,7 @@ integTestCluster {
setting 'xpack.license.self_generated.type', 'trial'
setting 'logger.level', 'DEBUG'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -1,3 +1,23 @@
// this file must exist so that qa projects are found
// by the elasticsearch x-plugins include mechanism
import org.elasticsearch.gradle.test.RestIntegTestTask
subprojects {
// HACK: please fix this
// we want to add the rest api specs for xpack to qa tests, but we
// need to wait until after the project is evaluated to only apply
// to those that rest tests. this used to be done automatically
// when xpack was a plugin, but now there is no place with xpack as a module.
// instead, we should package these and make them easy to use for rest tests,
// but currently, they must be copied into the resources of the test runner.
project.tasks.withType(RestIntegTestTask) {
File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources')
project.copyRestSpec.from(xpackResources) {
include 'rest-api-spec/api/**'
}
}
}
/* Remove assemble on all qa projects because we don't need to publish
* artifacts for them. */

View File

@ -7,7 +7,7 @@ dependencies {
}
integTest {
includePackaged true
includePackaged = true
}
integTestRunner {
@ -22,14 +22,13 @@ integTestRunner {
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.watcher.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -140,12 +140,15 @@ subprojects {
Object extension = extensions.findByName("${baseName}#oldClusterTestCluster")
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
dependsOn copyTestNodeKeystore
plugin xpackProject('plugin').path
if (version.before('6.3.0')) {
plugin xpackProject('plugin').path
}
bwcVersion = version
numBwcNodes = 2
numNodes = 2
clusterName = 'full-cluster-restart'
setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/users'
setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = waitWithAuth
// some tests rely on the translog not being flushed
@ -189,12 +192,11 @@ subprojects {
dependsOn oldClusterTestRunner,
"${baseName}#oldClusterTestCluster#node0.stop",
"${baseName}#oldClusterTestCluster#node1.stop"
plugin xpackProject('plugin').path
numNodes = 2
clusterName = 'full-cluster-restart'
dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir }
cleanShared = false // We want to keep snapshots made by the old cluster!
setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupTestUser', 'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = waitWithAuth
// debug logging for testRecovery see https://github.com/elastic/x-pack-elasticsearch/issues/2691

View File

@ -13,5 +13,4 @@ integTestCluster {
setting 'xpack.ml.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
numNodes = 3
plugin xpackProject('plugin').path
}

View File

@ -10,5 +10,4 @@ integTestCluster {
setting 'xpack.security.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
numNodes = 1
plugin xpackProject('plugin').path
}

View File

@ -61,13 +61,12 @@ integTestCluster {
setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
plugin xpackProject('plugin').path
keystoreSetting 'bootstrap.password', 'x-pack-test-password'
keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
extraConfigFile nodeKeystore.name, nodeKeystore

View File

@ -9,5 +9,4 @@ dependencies {
integTestCluster {
setting 'xpack.security.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
plugin xpackProject('plugin').path
}

View File

@ -15,14 +15,13 @@ remoteClusterTestCluster {
numNodes = 2
clusterName = 'remote-cluster'
setting 'search.remote.connect', false
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.watcher.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",
@ -43,14 +42,13 @@ task mixedClusterTest(type: RestIntegTestTask) {}
mixedClusterTestCluster {
dependsOn remoteClusterTestRunner
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.watcher.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -8,14 +8,13 @@ dependencies {
integTestCluster {
numNodes = 2
clusterName = 'multi-node'
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.watcher.enabled', 'false'
setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false'
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
setupCommand 'setup-test-user', 'bin/x-pack/users', 'useradd', 'test-user', '-p', 'x-pack-test-password', '-r', 'test'
setupCommand 'setup-super-user', 'bin/x-pack/users', 'useradd', 'super-user', '-p', 'x-pack-super-password', '-r', 'superuser'
extraConfigFile 'roles.yml', 'roles.yml'
setupCommand 'setup-test-user', 'bin/users', 'useradd', 'test-user', '-p', 'x-pack-test-password', '-r', 'test'
setupCommand 'setup-super-user', 'bin/users', 'useradd', 'super-user', '-p', 'x-pack-super-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -9,13 +9,12 @@ dependencies {
}
integTestCluster {
plugin xpackProject('plugin').path
// Whitelist reindexing from the local node so we can test it.
setting 'reindex.remote.whitelist', '127.0.0.1:*'
setting 'xpack.security.enabled', 'true'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
extraConfigFile 'roles.yml', 'roles.yml'
[
test_admin: 'superuser',
powerful_user: 'superuser',
@ -26,7 +25,7 @@ integTestCluster {
can_not_see_hidden_fields_user: 'can_not_see_hidden_fields',
].each { String user, String role ->
setupCommand 'setupUser#' + user,
'bin/x-pack/users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role
'bin/users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role
}
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')

View File

@ -38,15 +38,8 @@ public class ReindexWithSecurityIT extends SecurityIntegTestCase {
*/
@Override
public void doAssertXPackIsInstalled() {
NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().clear().setPlugins(true).get();
for (NodeInfo nodeInfo : nodeInfos.getNodes()) {
// TODO: disable this assertion for now, due to random runs with mock plugins. perhaps run without mock plugins?
// assertThat(nodeInfo.getPlugins().getInfos(), hasSize(2));
Collection<String> pluginNames =
nodeInfo.getPlugins().getPluginInfos().stream().map(p -> p.getClassname()).collect(Collectors.toList());
assertThat("plugin [" + Security.class.getName() + "] not found in [" + pluginNames + "]", pluginNames,
hasItem(Security.class.getName()));
}
// this assertion doesn't make sense with a real distribution, since there is not currently a way
// from nodes info to see which modules are loaded
}
public void testDeleteByQuery() {

View File

@ -81,7 +81,9 @@ for (Version version : bwcVersions.wireCompatible) {
}
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
plugin xpackProject('plugin').path
if (version.before('6.3.0')) {
plugin xpackProject('plugin').path
}
bwcVersion = version
numBwcNodes = 2
numNodes = 2
@ -104,7 +106,6 @@ for (Version version : bwcVersions.wireCompatible) {
configure(extensions.findByName("${baseName}#mixedClusterTestCluster")) {
dependsOn oldClusterTestRunner, "${baseName}#oldClusterTestCluster#node1.stop"
plugin xpackProject('plugin').path
clusterName = 'rolling-upgrade-basic'
unicastTransportUri = { seedNode, node, ant -> oldClusterTest.nodes.get(0).transportUri() }
minimumMasterNodes = { 2 }
@ -128,7 +129,6 @@ for (Version version : bwcVersions.wireCompatible) {
configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) {
dependsOn(mixedClusterTestRunner, "${baseName}#oldClusterTestCluster#node0.stop")
plugin xpackProject('plugin').path
clusterName = 'rolling-upgrade-basic'
unicastTransportUri = { seedNode, node, ant -> mixedClusterTest.nodes.get(0).transportUri() }
minimumMasterNodes = { 2 }

View File

@ -122,8 +122,11 @@ subprojects {
Object extension = extensions.findByName("${baseName}#oldClusterTestCluster")
configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
dependsOn copyTestNodeKeystore
plugin xpackProject('plugin').path
setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
if (version.before('6.3.0')) {
plugin xpackProject('plugin').path
}
String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/users'
setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
bwcVersion = version
numBwcNodes = 2
numNodes = 2
@ -152,7 +155,8 @@ subprojects {
if (version.onOrAfter('6.0.0')) {
keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key"
} else {
extraConfigFile 'x-pack/system_key', "${mainProject.projectDir}/src/test/resources/system_key"
String systemKeyFile = version.before('6.3.0') ? 'x-pack/system_key' : 'system_key'
extraConfigFile systemKeyFile, "${mainProject.projectDir}/src/test/resources/system_key"
}
setting 'xpack.watcher.encrypt_sensitive_data', 'true'
}
@ -167,8 +171,7 @@ subprojects {
configure(extensions.findByName("${baseName}#mixedClusterTestCluster")) {
dependsOn oldClusterTestRunner, "${baseName}#oldClusterTestCluster#node1.stop"
plugin xpackProject('plugin').path
setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupTestUser', 'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
clusterName = 'rolling-upgrade'
unicastTransportUri = { seedNode, node, ant -> oldClusterTest.nodes.get(0).transportUri() }
minimumMasterNodes = { 2 }
@ -206,8 +209,7 @@ subprojects {
configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) {
dependsOn(mixedClusterTestRunner, "${baseName}#oldClusterTestCluster#node0.stop")
plugin xpackProject('plugin').path
setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupTestUser', 'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
clusterName = 'rolling-upgrade'
unicastTransportUri = { seedNode, node, ant -> mixedClusterTest.nodes.get(0).transportUri() }
minimumMasterNodes = { 2 }

View File

@ -30,8 +30,6 @@ if (project.rootProject.vagrantSupported) {
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.security.enabled', 'true'
setting 'xpack.security.http.ssl.enabled', 'false'
@ -54,7 +52,7 @@ integTestCluster {
extraConfigFile 'idp-metadata.xml', idpFixtureProject.file("src/main/resources/provision/generated/idp-metadata.xml")
setupCommand 'setupTestAdmin',
'bin/x-pack/users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser"
'bin/users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser"
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')

View File

@ -19,14 +19,13 @@ integTestRunner {
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupTransportClientUser',
'bin/x-pack/users', 'useradd', 'transport', '-p', 'x-pack-test-password', '-r', 'transport_client'
'bin/users', 'useradd', 'transport', '-p', 'x-pack-test-password', '-r', 'transport_client'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -19,7 +19,6 @@ integTestRunner {
integTestCluster {
dependsOn buildZip
plugin xpackProject('plugin').path
setting 'xpack.security.authc.realms.custom.order', '0'
setting 'xpack.security.authc.realms.custom.type', 'custom'
setting 'xpack.security.authc.realms.custom.filtered_setting', 'should be filtered'
@ -38,7 +37,7 @@ integTestCluster {
distribution = 'zip'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

View File

@ -8,10 +8,9 @@ dependencies {
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
extraConfigFile 'roles.yml', 'roles.yml'
[
test_admin: 'superuser',
transport_user: 'superuser',
@ -19,7 +18,7 @@ integTestCluster {
bob: 'actual_role'
].each { String user, String role ->
setupCommand 'setupUser#' + user,
'bin/x-pack/users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role
'bin/users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role
}
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')

View File

@ -12,9 +12,8 @@ integTestRunner {
}
integTestCluster {
plugin xpackProject('plugin').path
setupCommand 'setupTestAdmin',
'bin/x-pack/users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser"
'bin/users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser"
setting 'xpack.security.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
waitCondition = { node, ant ->

View File

@ -67,7 +67,7 @@ public class UsersToolTests extends CommandTestCase {
public void setupHome() throws IOException {
Path homeDir = jimfs.getPath("eshome");
IOUtils.rm(homeDir);
confDir = homeDir.resolve("config").resolve(XPackField.NAME);
confDir = homeDir.resolve("config");
Files.createDirectories(confDir);
String defaultPassword = SecuritySettingsSourceField.TEST_PASSWORD;
Files.write(confDir.resolve("users"), Arrays.asList(
@ -112,7 +112,7 @@ public class UsersToolTests extends CommandTestCase {
return new AddUserCommand() {
@Override
protected Environment createEnv(Map<String, String> settings) throws UserException {
return new Environment(UsersToolTests.this.settings, confDir.getParent());
return new Environment(UsersToolTests.this.settings, confDir);
}
};
}
@ -122,7 +122,7 @@ public class UsersToolTests extends CommandTestCase {
return new DeleteUserCommand() {
@Override
protected Environment createEnv(Map<String, String> settings) throws UserException {
return new Environment(UsersToolTests.this.settings, confDir.getParent());
return new Environment(UsersToolTests.this.settings, confDir);
}
};
}
@ -132,7 +132,7 @@ public class UsersToolTests extends CommandTestCase {
return new PasswordCommand() {
@Override
protected Environment createEnv(Map<String, String> settings) throws UserException {
return new Environment(UsersToolTests.this.settings, confDir.getParent());
return new Environment(UsersToolTests.this.settings, confDir);
}
};
}
@ -142,7 +142,7 @@ public class UsersToolTests extends CommandTestCase {
return new RolesCommand() {
@Override
protected Environment createEnv(Map<String, String> settings) throws UserException {
return new Environment(UsersToolTests.this.settings, confDir.getParent());
return new Environment(UsersToolTests.this.settings, confDir);
}
};
}
@ -152,7 +152,7 @@ public class UsersToolTests extends CommandTestCase {
return new ListCommand() {
@Override
protected Environment createEnv(Map<String, String> settings) throws UserException {
return new Environment(UsersToolTests.this.settings, confDir.getParent());
return new Environment(UsersToolTests.this.settings, confDir);
}
};
}
@ -492,53 +492,49 @@ public class UsersToolTests extends CommandTestCase {
public void testUserAddNoConfig() throws Exception {
Path homeDir = jimfs.getPath("eshome");
Path xpackConfDir = homeDir.resolve("config").resolve(XPackField.NAME);
IOUtils.rm(confDir);
IOUtils.rm(confDir.resolve("users"));
pathHomeParameter = "-Epath.home=" + homeDir;
fileTypeParameter = "-Expack.security.authc.realms.file.type=file";
UserException e = expectThrows(UserException.class, () -> {
execute("useradd", pathHomeParameter, fileTypeParameter, "username", "-p", SecuritySettingsSourceField.TEST_PASSWORD);
});
assertEquals(ExitCodes.CONFIG, e.exitCode);
assertThat(e.getMessage(), containsString("is the configuration directory for Elasticsearch and create directory"));
assertThat(e.getMessage(), containsString("Configuration file [users] is missing"));
}
public void testUserListNoConfig() throws Exception {
Path homeDir = jimfs.getPath("eshome");
Path xpackConfDir = homeDir.resolve("config").resolve(XPackField.NAME);
IOUtils.rm(confDir);
IOUtils.rm(confDir.resolve("users"));
pathHomeParameter = "-Epath.home=" + homeDir;
fileTypeParameter = "-Expack.security.authc.realms.file.type=file";
UserException e = expectThrows(UserException.class, () -> {
execute("list", pathHomeParameter, fileTypeParameter);
});
assertEquals(ExitCodes.CONFIG, e.exitCode);
assertThat(e.getMessage(), containsString("is the configuration directory for Elasticsearch and create directory"));
assertThat(e.getMessage(), containsString("Configuration file [users] is missing"));
}
public void testUserDelNoConfig() throws Exception {
Path homeDir = jimfs.getPath("eshome");
Path xpackConfDir = homeDir.resolve("config").resolve(XPackField.NAME);
IOUtils.rm(confDir);
IOUtils.rm(confDir.resolve("users"));
pathHomeParameter = "-Epath.home=" + homeDir;
fileTypeParameter = "-Expack.security.authc.realms.file.type=file";
UserException e = expectThrows(UserException.class, () -> {
execute("userdel", pathHomeParameter, fileTypeParameter, "username");
});
assertEquals(ExitCodes.CONFIG, e.exitCode);
assertThat(e.getMessage(), containsString("is the configuration directory for Elasticsearch and create directory"));
assertThat(e.getMessage(), containsString("Configuration file [users] is missing"));
}
public void testListUserRolesNoConfig() throws Exception {
Path homeDir = jimfs.getPath("eshome");
Path xpackConfDir = homeDir.resolve("config").resolve(XPackField.NAME);
IOUtils.rm(confDir);
IOUtils.rm(confDir.resolve("users_roles"));
pathHomeParameter = "-Epath.home=" + homeDir;
fileTypeParameter = "-Expack.security.authc.realms.file.type=file";
UserException e = expectThrows(UserException.class, () -> {
execute("roles", pathHomeParameter, fileTypeParameter, "username");
});
assertEquals(ExitCodes.CONFIG, e.exitCode);
assertThat(e.getMessage(), containsString("is the configuration directory for Elasticsearch and create directory"));
assertThat(e.getMessage(), containsString("Configuration file [users_roles] is missing"));
}
}

View File

@ -77,7 +77,7 @@ public class SystemKeyToolTests extends CommandTestCase {
public void testGeneratePathInSettings() throws Exception {
final Path homeDir = initFileSystem(false);
Path xpackConf = homeDir.resolve("config").resolve(XPackField.NAME);
Path xpackConf = homeDir.resolve("config");
Files.createDirectories(xpackConf);
execute("-Epath.home=" + homeDir.toString());
byte[] bytes = Files.readAllBytes(xpackConf.resolve("system_key"));
@ -86,7 +86,7 @@ public class SystemKeyToolTests extends CommandTestCase {
public void testGenerateDefaultPath() throws Exception {
final Path homeDir = initFileSystem(false);
Path keyPath = homeDir.resolve("config/x-pack/system_key");
Path keyPath = homeDir.resolve("config/system_key");
Files.createDirectories(keyPath.getParent());
execute("-Epath.home=" + homeDir.toString());
byte[] bytes = Files.readAllBytes(keyPath);

View File

@ -14,14 +14,13 @@ task copyGraphRestTests(type: Copy) {
integTestCluster {
dependsOn copyGraphRestTests
plugin xpackProject('plugin').path
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
extraConfigFile 'roles.yml', 'roles.yml'
setupCommand 'setupTestAdminUser',
'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupGraphExplorerUser',
'bin/x-pack/users', 'useradd', 'graph_explorer', '-p', 'x-pack-test-password', '-r', 'graph_explorer'
'bin/users', 'useradd', 'graph_explorer', '-p', 'x-pack-test-password', '-r', 'graph_explorer'
setupCommand 'setupPowerlessUser',
'bin/x-pack/users', 'useradd', 'no_graph_explorer', '-p', 'x-pack-test-password', '-r', 'no_graph_explorer'
'bin/users', 'useradd', 'no_graph_explorer', '-p', 'x-pack-test-password', '-r', 'no_graph_explorer'
setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.security.enabled', 'true'
waitCondition = { node, ant ->

View File

@ -94,16 +94,15 @@ integTestRunner {
integTestCluster {
dependsOn copyMlRestTests
plugin xpackProject('plugin').path
extraConfigFile 'x-pack/roles.yml', 'roles.yml'
extraConfigFile 'roles.yml', 'roles.yml'
setupCommand 'setupTestAdminUser',
'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupMlAdminUser',
'bin/x-pack/users', 'useradd', 'ml_admin', '-p', 'x-pack-test-password', '-r', 'minimal,machine_learning_admin'
'bin/users', 'useradd', 'ml_admin', '-p', 'x-pack-test-password', '-r', 'minimal,machine_learning_admin'
setupCommand 'setupMlUserUser',
'bin/x-pack/users', 'useradd', 'ml_user', '-p', 'x-pack-test-password', '-r', 'minimal,machine_learning_user'
'bin/users', 'useradd', 'ml_user', '-p', 'x-pack-test-password', '-r', 'minimal,machine_learning_user'
setupCommand 'setupPowerlessUser',
'bin/x-pack/users', 'useradd', 'no_ml', '-p', 'x-pack-test-password', '-r', 'minimal'
'bin/users', 'useradd', 'no_ml', '-p', 'x-pack-test-password', '-r', 'minimal'
setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.security.enabled', 'true'
waitCondition = { node, ant ->

View File

@ -8,7 +8,6 @@ dependencies {
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.monitoring.enabled', 'true'
setting 'xpack.watcher.enabled', 'true'
setting 'xpack.security.enabled', 'false'

View File

@ -160,12 +160,6 @@ integTestCluster.dependsOn(importClientCertificateInNodeKeyStore, importNodeCert
ext.pluginsCount = 0
project(xpackProject('plugin').path).subprojects { Project p ->
// the meta plugin contains the individual xpack plugins
if (p.extensions.findByName('esplugin') != null) {
pluginsCount += 1
}
}
project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each { subproj ->
// need to get a non-decorated project object, so must re-lookup the project by path
integTestCluster.plugin(subproj.path)
@ -189,16 +183,14 @@ integTestCluster {
setting 'xpack.ml.enabled', 'false'
plugin xpackProject('plugin').path
// copy keystores into config/
extraConfigFile nodeKeystore.name, nodeKeystore
extraConfigFile clientKeyStore.name, clientKeyStore
setupCommand 'setupTestUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
setupCommand 'setupMonitoringUser',
'bin/x-pack/users', 'useradd', 'monitoring_agent', '-p', 'x-pack-test-password', '-r', 'remote_monitoring_agent'
'bin/users', 'useradd', 'monitoring_agent', '-p', 'x-pack-test-password', '-r', 'remote_monitoring_agent'
waitCondition = { NodeInfo node, AntBuilder ant ->
File tmpFile = new File(node.cwd, 'wait.success')

View File

@ -8,12 +8,6 @@ dependencies {
}
ext.pluginsCount = 0
project(xpackProject('plugin').path).subprojects { Project p ->
// the meta plugin contains the individual xpack plugins
if (p.extensions.findByName('esplugin') != null) {
pluginsCount += 1
}
}
project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each { subproj ->
// need to get a non-decorated project object, so must re-lookup the project by path
integTestCluster.plugin(subproj.path)
@ -21,10 +15,9 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each
}
integTestCluster {
plugin xpackProject('plugin').path
setting 'xpack.security.enabled', 'true'
setupCommand 'setupDummyUser',
'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
'bin/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
waitCondition = { node, ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow",

Some files were not shown because too many files have changed in this diff.