Test fixtures krb5 (#40297)

Replaces the Vagrant-based Kerberos fixtures with the Docker-based test fixtures plugin.
The configuration is now entirely static on the Docker side and no longer driven by Gradle.
Two separate services are configured because the fixture has two different consumers that can run in parallel and require different configurations.
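
In short, a consuming project now declares the fixture through the test fixtures plugin and reads the rendered krb5.conf and keytabs from the fixture project instead of driving Vagrant itself. A condensed sketch of that pattern, assembled from the build script changes below (all names are taken from the diff, not an extra file in the commit):

  apply plugin: 'elasticsearch.test.fixtures'
  testFixtures.useFixture ":test:fixtures:krb5kdc-fixture"

  // The fixture exposes per-service helpers; "hdfs" serves the repository-hdfs plugin,
  // "peppa" serves the x-pack kerberos qa project.
  integTestCluster {
    jvmArgs += " -Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa")}"
    extraConfigFile("es.keytab", project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "HTTP_localhost.keytab"))
  }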
Alpar Torok 2019-03-28 17:23:40 +02:00
parent 482d9804b1
commit d791e08932
16 changed files with 190 additions and 335 deletions


@@ -31,7 +31,6 @@ import org.gradle.api.Project;
 import org.gradle.api.Task;
 import org.gradle.api.plugins.BasePlugin;
 import org.gradle.api.plugins.ExtraPropertiesExtension;
-import org.gradle.api.tasks.Input;
 import org.gradle.api.tasks.TaskContainer;
 import java.lang.reflect.InvocationTargetException;
@@ -104,6 +103,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
                 "but none could be found so these will be skipped", project.getPath()
             );
             disableTaskByType(tasks, getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"));
+            disableTaskByType(tasks, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask"));
             // conventions are not honored when the tasks are disabled
             disableTaskByType(tasks, TestingConventionsTasks.class);
             disableTaskByType(tasks, ComposeUp.class);
@@ -122,6 +122,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
                     fixtureProject,
                     (name, port) -> setSystemProperty(task, name, port)
                 );
+                task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture"));
            })
        );
@@ -155,7 +156,6 @@ public class TestFixturesPlugin implements Plugin<Project> {
        );
    }
-    @Input
    public boolean dockerComposeSupported(Project project) {
        if (OS.current().equals(OS.WINDOWS)) {
            return false;


@@ -24,18 +24,19 @@ import org.elasticsearch.gradle.test.RestIntegTestTask
 import java.nio.file.Files
 import java.nio.file.Path
 import java.nio.file.Paths
+apply plugin: 'elasticsearch.test.fixtures'
 esplugin {
   description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
   classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
 }
-apply plugin: 'elasticsearch.vagrantsupport'
 versions << [
   'hadoop2': '2.8.1'
 ]
+testFixtures.useFixture ":test:fixtures:krb5kdc-fixture"
 configurations {
   hdfsFixture
 }
@@ -68,52 +69,14 @@ dependencyLicenses {
   mapping from: /hadoop-.*/, to: 'hadoop'
 }
-// MIT Kerberos Vagrant Testing Fixture
-String box = "krb5kdc"
-Map<String,String> vagrantEnvVars = [
-  'VAGRANT_CWD'         : "${project(':test:fixtures:krb5kdc-fixture').projectDir}",
-  'VAGRANT_VAGRANTFILE' : 'Vagrantfile',
-  'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}"
-]
-task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'box'
-  subcommand 'update'
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn "vagrantCheckVersion", "virtualboxCheckVersion"
-}
-task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) {
-  command 'up'
-  args '--provision', '--provider', 'virtualbox'
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn krb5kdcUpdate
-}
-task krb5AddPrincipals {
-  dependsOn krb5kdcFixture
-}
-List<String> principals = [ "elasticsearch", "hdfs/hdfs.build.elastic.co" ]
 String realm = "BUILD.ELASTIC.CO"
-for (String principal : principals) {
-  Task create = project.tasks.create("addPrincipal#${principal}".replace('/', '_'), org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-    command 'ssh'
-    args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $principal"
-    boxName box
-    environmentVars vagrantEnvVars
-    dependsOn krb5kdcFixture
-  }
-  krb5AddPrincipals.dependsOn(create)
-}
 // Create HDFS File System Testing Fixtures for HA/Secure combinations
 for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
   project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
-    dependsOn project.configurations.hdfsFixture
+    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
     executable = new File(project.runtimeJavaHome, 'bin/java')
     env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
     waitCondition = { fixture, ant ->
@@ -126,9 +89,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
     // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
     if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
-      dependsOn krb5kdcFixture, krb5AddPrincipals
-      Path krb5Config = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf")
-      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5Config}");
+      miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}");
       if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
         miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
       }
@@ -145,9 +106,11 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
     // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
     if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
-      Path keytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab")
       miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
-      miniHDFSArgs.add("${keytabPath}")
+      miniHDFSArgs.add(
+        project(':test:fixtures:krb5kdc-fixture')
+          .ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
+      )
     }
     args miniHDFSArgs.toArray()
@@ -170,10 +133,11 @@ project.afterEvaluate {
     // If it's a secure cluster, add the keytab as an extra config, and set the krb5 conf in the JVM options.
     if (integTestTaskName.equals('integTestSecure') || integTestTaskName.equals('integTestSecureHa')) {
-      Path elasticsearchKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("elasticsearch.keytab").toAbsolutePath()
-      Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath()
-      restIntegTestTask.clusterConfig.extraConfigFile("repository-hdfs/krb5.keytab", "${elasticsearchKT}")
+      String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")
+      restIntegTestTask.clusterConfig.extraConfigFile(
+        "repository-hdfs/krb5.keytab",
+        "${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"
+      )
       jvmArgs = jvmArgs + " " + "-Djava.security.krb5.conf=${krb5conf}"
       if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
         jvmArgs = jvmArgs + " " + '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED'
@@ -189,9 +153,10 @@ project.afterEvaluate {
       if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
         restIntegTestTaskRunner.jvmArg '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED'
       }
-      Path hdfsKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab").toAbsolutePath()
-      restIntegTestTaskRunner.systemProperty "test.krb5.keytab.hdfs", "${hdfsKT}"
+      restIntegTestTaskRunner.systemProperty (
+        "test.krb5.keytab.hdfs",
+        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab")
+      )
     }
   }
@@ -269,40 +234,24 @@ if (fixtureSupported) {
   integTestHa.setEnabled(false)
 }
-// Secure HDFS testing relies on the Vagrant based Kerberos fixture.
-boolean secureFixtureSupported = false
-if (fixtureSupported) {
-  secureFixtureSupported = project.rootProject.vagrantSupported
-}
-if (secureFixtureSupported) {
-  project.check.dependsOn(integTestSecure)
-  project.check.dependsOn(integTestSecureHa)
-  // Fixture dependencies
-  integTestSecureCluster.dependsOn secureHdfsFixture, krb5kdcFixture
-  integTestSecureHaCluster.dependsOn secureHaHdfsFixture, krb5kdcFixture
-  // Set the keytab files in the classpath so that we can access them from test code without the security manager
-  // freaking out.
-  Path hdfsKeytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs")
-  project.dependencies {
-    testRuntime fileTree(dir: hdfsKeytabPath.toString(), include: ['*.keytab'])
-  }
-  // Run just the secure hdfs rest test suite.
-  integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
-  // Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner.
-  integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class')
-  // Only include the HA integration tests for the HA test task
-  integTestSecureHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class'])
-} else {
-  // Security tests unsupported. Don't run these tests.
-  integTestSecure.enabled = false
-  integTestSecureHa.enabled = false
-  testingConventions.enabled = false
-}
+check.dependsOn(integTestSecure, integTestSecureHa)
+// Fixture dependencies
+integTestSecureCluster.dependsOn secureHdfsFixture
+integTestSecureHaCluster.dependsOn secureHaHdfsFixture
+// Set the keytab files in the classpath so that we can access them from test code without the security manager
+// freaking out.
+project.dependencies {
+  testRuntime fileTree(dir: project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent, include: ['*.keytab'])
+}
+// Run just the secure hdfs rest test suite.
+integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
+// Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner.
+integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class')
+// Only include the HA integration tests for the HA test task
+integTestSecureHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class'])
 thirdPartyAudit {
   ignoreMissingClasses()

test/fixtures/hdfs-fixture/Dockerfile (new file)

@@ -0,0 +1,8 @@
FROM java:8-jre
RUN apt-get update && apt-get install net-tools
EXPOSE 9998
EXPOSE 9999
CMD java -cp "/fixture:/fixture/*" hdfs.MiniHDFS /data


@@ -18,25 +18,23 @@
  */
 apply plugin: 'elasticsearch.build'
+apply plugin: 'elasticsearch.test.fixtures'
-versions << [
-  'hadoop2': '2.8.1'
-]
-// we create MiniHdfsCluster with the hadoop artifact
 dependencies {
-  compile "org.apache.hadoop:hadoop-minicluster:${versions.hadoop2}"
+  compile "org.apache.hadoop:hadoop-minicluster:2.8.1"
 }
-// for testing, until fixtures are actually debuggable.
-// gradle hides *EVERYTHING* so you have no clue what went wrong.
-task hdfs(type: JavaExec) {
-  classpath = sourceSets.test.compileClasspath + sourceSets.test.output
-  main = "hdfs.MiniHDFS"
-  args = [ 'build/fixtures/hdfsFixture' ]
-}
-// just a test fixture: we aren't using jars in releases
-thirdPartyAudit.enabled = false
-// TODO: add a simple HDFS client test for this fixture
+task syncClasses(type: Sync) {
+  from sourceSets.test.runtimeClasspath
+  into "${buildDir}/fixture"
+}
+preProcessFixture {
+  dependsOn syncClasses
+  doLast {
+    file("${buildDir}/shared").mkdirs()
+  }
+}
 unitTest.enabled = false


@@ -0,0 +1,11 @@
version: '3'
services:
  hdfs:
    hostname: hdfs.build.elastic.co
    build:
      context: .
      dockerfile: Dockerfile
    volumes:
      - ./build/fixture:/fixture
    ports:
      - "9999:9999"


@@ -98,7 +98,6 @@ public class MiniHDFS {
     UserGroupInformation.setConfiguration(cfg);
-    // TODO: remove hardcoded port!
     MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg);
     if (secure) {
       builder.nameNodePort(9998);


@@ -0,0 +1,9 @@
FROM ubuntu:14.04
ADD . /fixture
RUN echo kerberos.build.elastic.co > /etc/hostname && echo "127.0.0.1 kerberos.build.elastic.co" >> /etc/hosts
RUN bash /fixture/src/main/resources/provision/installkdc.sh
EXPOSE 88
EXPOSE 88/udp
CMD sleep infinity


@@ -1,53 +0,0 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This Vagrantfile exists to define a virtual machine running MIT's Kerberos 5
# for usage as a testing fixture for the build process.
#
# In order to connect to the KDC process on this virtual machine, find and use
# the rendered krb5.conf file in the build output directory (build/conf).
#
# In order to provision principals in the KDC, use the provided addprinc.sh
# script with vagrant's ssh facility:
#
# vagrant ssh -c /vagrant/src/main/resources/provision/addprinc.sh principal
#
# You will find the newly created principal's keytab file in the build output
# directory (build/keytabs). Principal creation is idempotent, and will recopy
# existing user keytabs from the KDC if they already exist.
Vagrant.configure("2") do |config|
config.vm.define "krb5kdc" do |config|
config.vm.box = "elastic/ubuntu-14.04-x86_64"
end
config.vm.hostname = "kerberos.build.elastic.co"
if Vagrant.has_plugin?("vagrant-cachier")
config.cache.scope = :box
end
config.vm.network "forwarded_port", guest: 88, host: 60088, protocol: "tcp"
config.vm.network "forwarded_port", guest: 88, host: 60088, protocol: "udp"
config.vm.provision "shell", path: "src/main/resources/provision/installkdc.sh"
end


@@ -16,68 +16,38 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+apply plugin: 'elasticsearch.test.fixtures'
+apply plugin: 'elasticsearch.build'
-Map<String, String> vagrantEnvVars = [
-  'VAGRANT_CWD'         : "${project.projectDir.absolutePath}",
-  'VAGRANT_VAGRANTFILE' : 'Vagrantfile',
-  'VAGRANT_PROJECT_DIR' : "${project.projectDir.absolutePath}"
-]
-String box = "krb5kdc"
-List<String> defaultPrincipals = [ "elasticsearch" ]
-task update(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'box'
-  subcommand 'update'
-  boxName box
-  environmentVars vagrantEnvVars
-}
-task up(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'up'
-  args '--provision', '--provider', 'virtualbox'
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn update
-}
-task addDefaultPrincipals {
-  dependsOn up
-}
-for (String principal : defaultPrincipals) {
-  Task addTask = project.tasks.create("addPrincipal#${principal}", org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-    command 'ssh'
-    args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $principal"
-    boxName box
-    environmentVars vagrantEnvVars
-    dependsOn up
-  }
-  addDefaultPrincipals.dependsOn(addTask)
-}
-task halt(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'halt'
-  boxName box
-  environmentVars vagrantEnvVars
-}
-task destroy(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'destroy'
-  args '-f'
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn halt
-}
-thirdPartyAudit.enabled = false
-unitTest.enabled = false
 // installKDC uses tabs in it for the Kerberos ACL file.
 // Ignore it for pattern checking.
 forbiddenPatterns {
   exclude "**/installkdc.sh"
 }
+List<String> services = ["peppa", "hdfs"]
+preProcessFixture.doLast {
+  // We need to create these up-front because if docker creates them they will be owned by root and we won't be
+  // able to clean them up
+  services.each { file("${buildDir}/shared/${it}").mkdirs() }
+}
+postProcessFixture {
+  inputs.dir("${buildDir}/shared")
+  services.each { service ->
+    File confTemplate = file("${buildDir}/shared/${service}/krb5.conf.template")
+    File confFile = file("${buildDir}/shared/${service}/krb5.conf")
+    outputs.file(confFile)
+    doLast {
+      assert confTemplate.exists()
+      String confContents = confTemplate.text
+        .replace("\${MAPPED_PORT}", "${ext."test.fixtures.${service}.udp.88"}")
+      confFile.text = confContents
+    }
+  }
+}
+project.ext.krb5Conf = { service -> file("$buildDir/shared/${service}/krb5.conf") }
+project.ext.krb5Keytabs = { service, fileName -> file("$buildDir/shared/${service}/keytabs/${fileName}") }
+unitTest.enabled = false
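
A note on the port wiring above: docker-compose publishes the KDC's 88/udp on an ephemeral host port, the test fixtures plugin appears to surface it as the "test.fixtures.<service>.udp.88" extra property, and postProcessFixture bakes it into the krb5.conf rendered from the template that addprinc.sh copies out of the container. A minimal sketch using the names from this build script (the "peppa" service; the concrete port value is hypothetical):

  def mappedKdcPort = ext."test.fixtures.peppa.udp.88"   // e.g. 32768, assigned by docker at compose-up time
  // template line:  kdc = 127.0.0.1:${MAPPED_PORT}   ->   rendered:  kdc = 127.0.0.1:32768
  File renderedConf = project.ext.krb5Conf("peppa")      // build/shared/peppa/krb5.conf, consumed by the tests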


@@ -0,0 +1,24 @@
version: '3'
services:
  peppa:
    hostname: kerberos.build.elastic.co
    build:
      context: .
      dockerfile: Dockerfile
    command: "bash /fixture/src/main/resources/provision/peppa.sh"
    volumes:
      - ./build/shared/peppa:/fixture/build
    ports:
      - "4444"
      - "88/udp"
  hdfs:
    hostname: kerberos.build.elastic.co
    build:
      context: .
      dockerfile: Dockerfile
    command: "bash /fixture/src/main/resources/provision/hdfs.sh"
    volumes:
      - ./build/shared/hdfs:/fixture/build
    ports:
      - "4444"
      - "88/udp"


@@ -19,6 +19,9 @@
 set -e
+krb5kdc
+kadmind
 if [[ $# -lt 1 ]]; then
   echo 'Usage: addprinc.sh principalName [password]'
   echo '  principalName    user principal name without realm'
@@ -30,7 +33,7 @@ PRINC="$1"
 PASSWD="$2"
 USER=$(echo $PRINC | tr "/" "_")
-VDIR=/vagrant
+VDIR=/fixture
 RESOURCES=$VDIR/src/main/resources
 PROV_DIR=$RESOURCES/provision
 ENVPROP_FILE=$RESOURCES/env.properties
@@ -64,3 +67,9 @@ else
   sudo kadmin -p $ADMIN_PRIN -kt $ADMIN_KTAB -q "addprinc -pw $PASSWD $PRINC"
 fi
 fi
+echo "Copying conf to local"
+# make the configuration available externally
+cp -v $LOCALSTATEDIR/krb5.conf $BUILD_DIR/krb5.conf.template
+# We are running as root in the container, allow non root users running the container to be able to clean these up
+chmod -R 777 $BUILD_DIR


@@ -0,0 +1,11 @@
#!/bin/bash
set -e
addprinc.sh "elasticsearch"
addprinc.sh "hdfs/hdfs.build.elastic.co"
# Use this as a signal that setup is complete
python3 -m http.server 4444 &
sleep infinity


@@ -22,32 +22,15 @@ set -e
 # KDC installation steps and considerations based on https://web.mit.edu/kerberos/krb5-latest/doc/admin/install_kdc.html
 # and helpful input from https://help.ubuntu.com/community/Kerberos
-VDIR=/vagrant
+VDIR=/fixture
 RESOURCES=$VDIR/src/main/resources
 PROV_DIR=$RESOURCES/provision
 ENVPROP_FILE=$RESOURCES/env.properties
-BUILD_DIR=$VDIR/build
-CONF_DIR=$BUILD_DIR/conf
-KEYTAB_DIR=$BUILD_DIR/keytabs
 LOCALSTATEDIR=/etc
 LOGDIR=/var/log/krb5
 MARKER_FILE=/etc/marker
-# Output location for our rendered configuration files and keytabs
-mkdir -p $BUILD_DIR
-rm -rf $BUILD_DIR/*
-mkdir -p $CONF_DIR
-mkdir -p $KEYTAB_DIR
-if [ -f $MARKER_FILE ]; then
-  echo "Already provisioned..."
-  echo "Recopying configuration files..."
-  cp $LOCALSTATEDIR/krb5.conf $CONF_DIR/krb5.conf
-  cp $LOCALSTATEDIR/krb5kdc/kdc.conf $CONF_DIR/kdc.conf
-  exit 0;
-fi
 # Pull environment information
 REALM_NAME=$(cat $ENVPROP_FILE | grep realm= | cut -d '=' -f 2)
 KDC_NAME=$(cat $ENVPROP_FILE | grep kdc= | cut -d '=' -f 2)
@@ -60,7 +43,7 @@ sed -i 's/${REALM_NAME}/'$REALM_NAME'/g' $LOCALSTATEDIR/krb5.conf
 sed -i 's/${KDC_NAME}/'$KDC_NAME'/g' $LOCALSTATEDIR/krb5.conf
 sed -i 's/${BUILD_ZONE}/'$BUILD_ZONE'/g' $LOCALSTATEDIR/krb5.conf
 sed -i 's/${ELASTIC_ZONE}/'$ELASTIC_ZONE'/g' $LOCALSTATEDIR/krb5.conf
-cp $LOCALSTATEDIR/krb5.conf $CONF_DIR/krb5.conf
 # Transfer and interpolate the kdc.conf
 mkdir -p $LOCALSTATEDIR/krb5kdc
@@ -69,7 +52,6 @@ sed -i 's/${REALM_NAME}/'$REALM_NAME'/g' $LOCALSTATEDIR/krb5kdc/kdc.conf
 sed -i 's/${KDC_NAME}/'$KDC_NAME'/g' $LOCALSTATEDIR/krb5kdc/kdc.conf
 sed -i 's/${BUILD_ZONE}/'$BUILD_ZONE'/g' $LOCALSTATEDIR/krb5kdc/kdc.conf
 sed -i 's/${ELASTIC_ZONE}/'$ELASTIC_ZONE'/g' $LOCALSTATEDIR/krb5.conf
-cp $LOCALSTATEDIR/krb5kdc/kdc.conf $CONF_DIR/kdc.conf
 # Touch logging locations
 mkdir -p $LOGDIR
@@ -112,9 +94,5 @@ EOF
 kadmin.local -q "addprinc -pw elastic admin/admin@$REALM_NAME"
 kadmin.local -q "ktadd -k /etc/admin.keytab admin/admin@$REALM_NAME"
-# Start Kerberos Services
-krb5kdc
-kadmind
-# Mark that the vm is already provisioned
-touch $MARKER_FILE
+# Create a link so addprinc.sh is on path
+ln -s $PROV_DIR/addprinc.sh /usr/bin/


@@ -32,12 +32,8 @@
 [realms]
   ${REALM_NAME} = {
-    kdc = ${KDC_NAME}:88
-    kdc = ${KDC_NAME}:60088
-    kdc = localhost:60088
-    kdc = localhost:88
-    kdc = 127.0.0.1:60088
     kdc = 127.0.0.1:88
+    kdc = 127.0.0.1:${MAPPED_PORT}
     admin_server = ${KDC_NAME}:749
     default_domain = ${BUILD_ZONE}
   }


@@ -0,0 +1,13 @@
#!/bin/bash
set -e
addprinc.sh elasticsearch
addprinc.sh HTTP/localhost
addprinc.sh peppa
addprinc.sh george dino
# Use this as a signal that setup is complete
python3 -m http.server 4444 &
sleep infinity


@@ -2,9 +2,11 @@ import java.nio.file.Path
 import java.nio.file.Paths
 import java.nio.file.Files
-apply plugin: 'elasticsearch.vagrantsupport'
 apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'
+apply plugin: 'elasticsearch.test.fixtures'
+testFixtures.useFixture ":test:fixtures:krb5kdc-fixture"
 dependencies {
   testCompile "org.elasticsearch.plugin:x-pack-core:${version}"
@@ -12,75 +14,6 @@ dependencies {
   testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
 }
-// MIT Kerberos Vagrant Testing Fixture
-String box = "krb5kdc"
-Map<String,String> vagrantEnvVars = [
-  'VAGRANT_CWD'         : "${project(':test:fixtures:krb5kdc-fixture').projectDir}",
-  'VAGRANT_VAGRANTFILE' : 'Vagrantfile',
-  'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}"
-]
-task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'box'
-  subcommand 'update'
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn "vagrantCheckVersion", "virtualboxCheckVersion"
-}
-task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) {
-  command 'up'
-  args '--provision', '--provider', 'virtualbox'
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn krb5kdcUpdate
-}
-// lazily resolve to avoid any slowdowns from DNS lookups prior to when we need this value
-Object httpPrincipal = new Object() {
-  @Override
-  String toString() {
-    InetAddress resolvedAddress = InetAddress.getByName('127.0.0.1')
-    return "HTTP/" + resolvedAddress.getCanonicalHostName()
-  }
-}
-String realm = "BUILD.ELASTIC.CO"
-task 'addPrincipal#peppa'(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'ssh'
-  args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh peppa "
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn krb5kdcFixture
-}
-task 'addPrincipal#george'(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'ssh'
-  args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh george dino"
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn krb5kdcFixture
-}
-task 'addPrincipal#HTTP'(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
-  command 'ssh'
-  args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $httpPrincipal"
-  boxName box
-  environmentVars vagrantEnvVars
-  dependsOn krb5kdcFixture
-}
-task krb5AddPrincipals { dependsOn krb5kdcFixture, 'addPrincipal#peppa', 'addPrincipal#george', 'addPrincipal#HTTP' }
-def generatedResources = "$buildDir/generated-resources/keytabs"
-task copyKeytabToGeneratedResources(type: Copy) {
-  Path peppaKeytab = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("peppa.keytab").toAbsolutePath()
-  from peppaKeytab;
-  into generatedResources
-  dependsOn krb5AddPrincipals
-}
 integTestCluster {
   // force localhost IPv4 otherwise it is a chicken and egg problem where we need the keytab for the hostname when starting the cluster
   // but do not know the exact address that is first in the http ports file
@@ -96,12 +29,10 @@ integTestCluster {
   setting 'xpack.security.authc.realms.kerberos.kerberos.krb.debug', 'true'
   setting 'xpack.security.authc.realms.kerberos.kerberos.remove_realm_name', 'false'
-  Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath()
-  String jvmArgsStr = " -Djava.security.krb5.conf=${krb5conf}" + " -Dsun.security.krb5.debug=true"
-  jvmArgs jvmArgsStr
-  Path esKeytab = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs")
-    .resolve("$httpPrincipal".replace('/', '_') + ".keytab").toAbsolutePath()
-  extraConfigFile("es.keytab", "${esKeytab}")
+  jvmArgs += " -Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa")}"
+  jvmArgs += " -Dsun.security.krb5.debug=true"
+  extraConfigFile("es.keytab", project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "HTTP_localhost.keytab"))
   setupCommand 'setupTestAdmin',
     'bin/elasticsearch-users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser"
@@ -119,6 +50,7 @@
 }
+String realm = "BUILD.ELASTIC.CO"
 integTestRunner {
   Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab")
   systemProperty 'test.userkt', "peppa@${realm}"
@@ -126,16 +58,17 @@ integTestRunner {
   systemProperty 'test.userpwd', "george@${realm}"
   systemProperty 'test.userpwd.password', "dino"
   systemProperty 'tests.security.manager', 'true'
-  Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath()
-  List jvmargs = ["-Djava.security.krb5.conf=${krb5conf}","-Dsun.security.krb5.debug=true"]
-  jvmArgs jvmargs
+  jvmArgs([
+    "-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa")}",
+    "-Dsun.security.krb5.debug=true"
+  ])
 }
-if (project.rootProject.vagrantSupported == false) {
-  integTest.enabled = false
-  testingConventions.enabled = false
-} else {
-  project.sourceSets.test.output.dir(generatedResources)
-  integTestCluster.dependsOn krb5AddPrincipals, krb5kdcFixture, copyKeytabToGeneratedResources
-  integTest.finalizedBy project(':test:fixtures:krb5kdc-fixture').halt
-}
+def generatedResources = "$buildDir/generated-resources/keytabs"
+task copyKeytabToGeneratedResources(type: Copy) {
+  from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab")
+  into generatedResources
+  dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture
+}
+project.sourceSets.test.output.dir(generatedResources, builtBy:copyKeytabToGeneratedResources)