Merge branch 'master' into ccr
* master: Mute 'Test typed keys parameter for suggesters' as we await a fix. Build test: Thread linger Fix gradle4.8 deprecation warnings (#31654) Mute FileRealmTests#testAuthenticateCaching with an @AwaitsFix. Mute TransportChangePasswordActionTests#testIncorrectPasswordHashingAlgorithm with an @AwaitsFix. Build: Fix naming conventions task (#31681) Introduce a Hashing Processor (#31087)
This commit is contained in:
commit
e46d23dfb2
18
build.gradle
18
build.gradle
|
@ -486,25 +486,17 @@ task run(type: Run) {
|
|||
impliesSubProjects = true
|
||||
}
|
||||
|
||||
task wrapper(type: Wrapper)
|
||||
|
||||
gradle.projectsEvaluated {
|
||||
|
||||
allprojects {
|
||||
tasks.withType(Wrapper) { Wrapper wrapper ->
|
||||
wrapper.distributionType = DistributionType.ALL
|
||||
|
||||
wrapper.doLast {
|
||||
wrapper {
|
||||
distributionType = DistributionType.ALL
|
||||
doLast {
|
||||
final DistributionLocator locator = new DistributionLocator()
|
||||
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
|
||||
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
|
||||
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
|
||||
final String sha256Sum = new String(sha256Uri.toURL().bytes)
|
||||
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
|
||||
println "Added checksum to wrapper properties"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static void assertLinesInFile(final Path path, final List<String> expectedLines) {
|
||||
|
@ -591,7 +583,7 @@ if (System.properties.get("build.compare") != null) {
|
|||
}
|
||||
}
|
||||
sourceBuild {
|
||||
gradleVersion = "4.7" // does not default to gradle weapper of project dir, but current version
|
||||
gradleVersion = "4.8.1" // does not default to gradle weapper of project dir, but current version
|
||||
projectDir = referenceProject
|
||||
tasks = ["clean", "assemble"]
|
||||
arguments = ["-Dbuild.compare_friendly=true"]
|
||||
|
|
|
@ -16,8 +16,6 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import java.nio.file.Files
|
||||
|
||||
plugins {
|
||||
|
@ -41,6 +39,12 @@ if (project == rootProject) {
|
|||
buildDir = 'build-bootstrap'
|
||||
}
|
||||
|
||||
// Make sure :buildSrc: doesn't generate classes incompatible with RUNTIME_JAVA_HOME
|
||||
// We can't use BuildPlugin here, so read from file
|
||||
String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim()
|
||||
targetCompatibility = minimumRuntimeVersion
|
||||
sourceCompatibility = minimumRuntimeVersion
|
||||
|
||||
/*****************************************************************************
|
||||
* Propagating version.properties to the rest of the build *
|
||||
*****************************************************************************/
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
package org.elasticsearch.gradle
|
||||
|
||||
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
|
||||
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
|
||||
import org.apache.tools.ant.taskdefs.condition.Os
|
||||
import org.eclipse.jgit.lib.Constants
|
||||
import org.eclipse.jgit.lib.RepositoryBuilder
|
||||
|
@ -58,9 +57,6 @@ import java.time.ZonedDateTime
|
|||
*/
|
||||
class BuildPlugin implements Plugin<Project> {
|
||||
|
||||
static final JavaVersion minimumRuntimeVersion = JavaVersion.VERSION_1_8
|
||||
static final JavaVersion minimumCompilerVersion = JavaVersion.VERSION_1_10
|
||||
|
||||
@Override
|
||||
void apply(Project project) {
|
||||
if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) {
|
||||
|
@ -95,6 +91,12 @@ class BuildPlugin implements Plugin<Project> {
|
|||
/** Performs checks on the build environment and prints information about the build environment. */
|
||||
static void globalBuildInfo(Project project) {
|
||||
if (project.rootProject.ext.has('buildChecksDone') == false) {
|
||||
JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(
|
||||
BuildPlugin.class.getClassLoader().getResourceAsStream("minimumRuntimeVersion").text.trim()
|
||||
)
|
||||
JavaVersion minimumCompilerVersion = JavaVersion.toVersion(
|
||||
BuildPlugin.class.getClassLoader().getResourceAsStream("minimumCompilerVersion").text.trim()
|
||||
)
|
||||
String compilerJavaHome = findCompilerJavaHome()
|
||||
String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome)
|
||||
File gradleJavaHome = Jvm.current().javaHome
|
||||
|
@ -192,10 +194,12 @@ class BuildPlugin implements Plugin<Project> {
|
|||
project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum
|
||||
project.rootProject.ext.javaVersions = javaVersions
|
||||
project.rootProject.ext.buildChecksDone = true
|
||||
project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion
|
||||
project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
|
||||
}
|
||||
|
||||
project.targetCompatibility = minimumRuntimeVersion
|
||||
project.sourceCompatibility = minimumRuntimeVersion
|
||||
project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion
|
||||
project.sourceCompatibility = project.rootProject.ext.minimumRuntimeVersion
|
||||
|
||||
// set java home for each project, so they dont have to find it in the root project
|
||||
project.ext.compilerJavaHome = project.rootProject.ext.compilerJavaHome
|
||||
|
@ -467,6 +471,24 @@ class BuildPlugin implements Plugin<Project> {
|
|||
|
||||
/**Configuration generation of maven poms. */
|
||||
public static void configurePomGeneration(Project project) {
|
||||
// Only works with `enableFeaturePreview('STABLE_PUBLISHING')`
|
||||
// https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
|
||||
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
|
||||
// The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it,
|
||||
// just make a copy.
|
||||
doLast {
|
||||
project.copy {
|
||||
from generatePOMTask.destination
|
||||
into "${project.buildDir}/distributions"
|
||||
rename { "${project.archivesBaseName}-${project.version}.pom" }
|
||||
}
|
||||
}
|
||||
// build poms with assemble (if the assemble task exists)
|
||||
Task assemble = project.tasks.findByName('assemble')
|
||||
if (assemble) {
|
||||
assemble.dependsOn(generatePOMTask)
|
||||
}
|
||||
}
|
||||
project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded {
|
||||
project.publishing {
|
||||
publications {
|
||||
|
@ -476,20 +498,6 @@ class BuildPlugin implements Plugin<Project> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Work around Gradle 4.8 issue until we `enableFeaturePreview('STABLE_PUBLISHING')`
|
||||
// https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
|
||||
project.getGradle().getTaskGraph().whenReady {
|
||||
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t ->
|
||||
// place the pom next to the jar it is for
|
||||
t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom")
|
||||
// build poms with assemble (if the assemble task exists)
|
||||
Task assemble = project.tasks.findByName('assemble')
|
||||
if (assemble) {
|
||||
assemble.dependsOn(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -159,16 +159,18 @@ public class PluginBuildPlugin extends BuildPlugin {
|
|||
/** Adds a task to move jar and associated files to a "-client" name. */
|
||||
protected static void addClientJarTask(Project project) {
|
||||
Task clientJar = project.tasks.create('clientJar')
|
||||
clientJar.dependsOn(project.jar, 'generatePomFileForClientJarPublication', project.javadocJar, project.sourcesJar)
|
||||
clientJar.dependsOn(project.jar, project.tasks.generatePomFileForClientJarPublication, project.javadocJar, project.sourcesJar)
|
||||
clientJar.doFirst {
|
||||
Path jarFile = project.jar.outputs.files.singleFile.toPath()
|
||||
String clientFileName = jarFile.fileName.toString().replace(project.version, "client-${project.version}")
|
||||
Files.copy(jarFile, jarFile.resolveSibling(clientFileName), StandardCopyOption.REPLACE_EXISTING)
|
||||
|
||||
String pomFileName = jarFile.fileName.toString().replace('.jar', '.pom')
|
||||
String clientPomFileName = clientFileName.replace('.jar', '.pom')
|
||||
Files.copy(jarFile.resolveSibling(pomFileName), jarFile.resolveSibling(clientPomFileName),
|
||||
StandardCopyOption.REPLACE_EXISTING)
|
||||
Files.copy(
|
||||
project.tasks.generatePomFileForClientJarPublication.outputs.files.singleFile.toPath(),
|
||||
jarFile.resolveSibling(clientPomFileName),
|
||||
StandardCopyOption.REPLACE_EXISTING
|
||||
)
|
||||
|
||||
String sourcesFileName = jarFile.fileName.toString().replace('.jar', '-sources.jar')
|
||||
String clientSourcesFileName = clientFileName.replace('.jar', '-sources.jar')
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.gradle.api.Plugin
|
|||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
import org.gradle.api.tasks.compile.JavaCompile
|
||||
|
||||
/**
|
||||
* Configures the build to compile tests against Elasticsearch's test framework
|
||||
|
@ -61,5 +62,12 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
|
|||
|
||||
PrecommitTasks.create(project, false)
|
||||
project.check.dependsOn(project.precommit)
|
||||
|
||||
project.tasks.withType(JavaCompile) {
|
||||
// This will be the default in Gradle 5.0
|
||||
if (options.compilerArgs.contains("-processor") == false) {
|
||||
options.compilerArgs << '-proc:none'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,12 +50,5 @@ public class StandaloneTestPlugin implements Plugin<Project> {
|
|||
test.testClassesDirs = project.sourceSets.test.output.classesDirs
|
||||
test.mustRunAfter(project.precommit)
|
||||
project.check.dependsOn(test)
|
||||
|
||||
project.tasks.withType(JavaCompile) {
|
||||
// This will be the default in Gradle 5.0
|
||||
if (options.compilerArgs.contains("-processor") == false) {
|
||||
options.compilerArgs << '-proc:none'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
1.10
|
|
@ -0,0 +1 @@
|
|||
1.8
|
|
@ -4,7 +4,6 @@ import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
|
|||
import org.gradle.testkit.runner.BuildResult;
|
||||
import org.gradle.testkit.runner.GradleRunner;
|
||||
import org.gradle.testkit.runner.TaskOutcome;
|
||||
import org.junit.Ignore;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
|
@ -22,7 +21,6 @@ public class NamingConventionsTaskIT extends GradleIntegrationTestCase {
|
|||
assertTrue(output, output.contains("build plugin can be applied"));
|
||||
}
|
||||
|
||||
@Ignore("AwaitsFix : https://github.com/elastic/elasticsearch/issues/31665")
|
||||
public void testNameCheckFailsAsItShould() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
|
||||
|
@ -48,7 +46,6 @@ public class NamingConventionsTaskIT extends GradleIntegrationTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@Ignore("AwaitsFix : https://github.com/elastic/elasticsearch/issues/31665")
|
||||
public void testNameCheckFailsAsItShouldWithMain() {
|
||||
BuildResult result = GradleRunner.create()
|
||||
.withProjectDir(getProjectDir("namingConventionsSelfTest"))
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.gradle.test;
|
|||
import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
|
||||
import com.carrotsearch.randomizedtesting.RandomizedRunner;
|
||||
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
|
||||
import org.junit.Assert;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
|
@ -29,5 +30,6 @@ import org.junit.runner.RunWith;
|
|||
JUnit4MethodProvider.class,
|
||||
JUnit3MethodProvider.class
|
||||
})
|
||||
@ThreadLeakLingering(linger = 5000) // wait for "Connection worker" to die
|
||||
public abstract class BaseTestCase extends Assert {
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ buildscript {
|
|||
}
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2'
|
||||
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,8 +34,10 @@ setup:
|
|||
---
|
||||
"Test typed keys parameter for suggesters":
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: queying a context suggester with no context was deprecated in 7.0
|
||||
# version: " - 6.99.99"
|
||||
# reason: queying a context suggester with no context was deprecated in 7.0
|
||||
version: "all"
|
||||
reason: "Awaiting a fix: https://github.com/elastic/elasticsearch/issues/31698"
|
||||
features: "warnings"
|
||||
|
||||
- do:
|
||||
|
|
|
@ -128,3 +128,6 @@ if (extraProjects.exists()) {
|
|||
addSubProjects('', extraProjectDir)
|
||||
}
|
||||
}
|
||||
|
||||
// enable in preparation for Gradle 5.0
|
||||
enableFeaturePreview('STABLE_PUBLISHING')
|
||||
|
|
|
@ -36,7 +36,7 @@ subprojects {
|
|||
// default to main class files if such a source set exists
|
||||
final List files = []
|
||||
if (project.sourceSets.findByName("main")) {
|
||||
files.add(project.sourceSets.main.output.classesDir)
|
||||
files.add(project.sourceSets.main.output.classesDirs)
|
||||
dependsOn project.tasks.classes
|
||||
}
|
||||
// filter out non-existent classes directories from empty source sets
|
||||
|
@ -151,6 +151,7 @@ integTestCluster {
|
|||
setting 'xpack.license.self_generated.type', 'trial'
|
||||
keystoreSetting 'bootstrap.password', 'x-pack-test-password'
|
||||
keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
|
||||
keystoreSetting 'xpack.security.ingest.hash.processor.key', 'hmackey'
|
||||
distribution = 'zip' // this is important since we use the reindex module in ML
|
||||
|
||||
setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'
|
||||
|
|
|
@ -173,6 +173,7 @@ import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener;
|
|||
import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache;
|
||||
import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore;
|
||||
import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
|
||||
import org.elasticsearch.xpack.security.ingest.HashProcessor;
|
||||
import org.elasticsearch.xpack.security.ingest.SetSecurityUserProcessor;
|
||||
import org.elasticsearch.xpack.security.rest.SecurityRestFilter;
|
||||
import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction;
|
||||
|
@ -573,6 +574,10 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
|
|||
// hide settings
|
||||
settingsList.add(Setting.listSetting(SecurityField.setting("hide_settings"), Collections.emptyList(), Function.identity(),
|
||||
Property.NodeScope, Property.Filtered));
|
||||
|
||||
// ingest processor settings
|
||||
settingsList.add(HashProcessor.HMAC_KEY_SETTING);
|
||||
|
||||
return settingsList;
|
||||
}
|
||||
|
||||
|
@ -716,7 +721,10 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
|
|||
|
||||
@Override
|
||||
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
|
||||
return Collections.singletonMap(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(parameters.threadContext));
|
||||
Map<String, Processor.Factory> processors = new HashMap<>();
|
||||
processors.put(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(parameters.threadContext));
|
||||
processors.put(HashProcessor.TYPE, new HashProcessor.Factory(parameters.env.settings()));
|
||||
return processors;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,200 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.security.ingest;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.settings.SecureSetting;
|
||||
import org.elasticsearch.common.settings.SecureString;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.ingest.AbstractProcessor;
|
||||
import org.elasticsearch.ingest.ConfigurationUtils;
|
||||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
import org.elasticsearch.xpack.core.security.SecurityField;
|
||||
|
||||
import javax.crypto.Mac;
|
||||
import javax.crypto.SecretKeyFactory;
|
||||
import javax.crypto.spec.PBEKeySpec;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.InvalidKeyException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.spec.InvalidKeySpecException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException;
|
||||
|
||||
/**
|
||||
* A processor that hashes the contents of a field (or fields) using various hashing algorithms
|
||||
*/
|
||||
public final class HashProcessor extends AbstractProcessor {
|
||||
public static final String TYPE = "hash";
|
||||
public static final Setting.AffixSetting<SecureString> HMAC_KEY_SETTING = SecureSetting
|
||||
.affixKeySetting(SecurityField.setting("ingest." + TYPE) + ".", "key",
|
||||
(key) -> SecureSetting.secureString(key, null));
|
||||
|
||||
private final List<String> fields;
|
||||
private final String targetField;
|
||||
private final Method method;
|
||||
private final Mac mac;
|
||||
private final byte[] salt;
|
||||
private final boolean ignoreMissing;
|
||||
|
||||
HashProcessor(String tag, List<String> fields, String targetField, byte[] salt, Method method, @Nullable Mac mac,
|
||||
boolean ignoreMissing) {
|
||||
super(tag);
|
||||
this.fields = fields;
|
||||
this.targetField = targetField;
|
||||
this.method = method;
|
||||
this.mac = mac;
|
||||
this.salt = salt;
|
||||
this.ignoreMissing = ignoreMissing;
|
||||
}
|
||||
|
||||
List<String> getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
String getTargetField() {
|
||||
return targetField;
|
||||
}
|
||||
|
||||
byte[] getSalt() {
|
||||
return salt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute(IngestDocument document) {
|
||||
Map<String, String> hashedFieldValues = fields.stream().map(f -> {
|
||||
String value = document.getFieldValue(f, String.class, ignoreMissing);
|
||||
if (value == null && ignoreMissing) {
|
||||
return new Tuple<String, String>(null, null);
|
||||
}
|
||||
try {
|
||||
return new Tuple<>(f, method.hash(mac, salt, value));
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException("field[" + f + "] could not be hashed", e);
|
||||
}
|
||||
}).filter(tuple -> Objects.nonNull(tuple.v1())).collect(Collectors.toMap(Tuple::v1, Tuple::v2));
|
||||
if (fields.size() == 1) {
|
||||
document.setFieldValue(targetField, hashedFieldValues.values().iterator().next());
|
||||
} else {
|
||||
document.setFieldValue(targetField, hashedFieldValues);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
public static final class Factory implements Processor.Factory {
|
||||
|
||||
private final Settings settings;
|
||||
private final Map<String, SecureString> secureKeys;
|
||||
|
||||
public Factory(Settings settings) {
|
||||
this.settings = settings;
|
||||
this.secureKeys = new HashMap<>();
|
||||
HMAC_KEY_SETTING.getAllConcreteSettings(settings).forEach(k -> {
|
||||
secureKeys.put(k.getKey(), k.get(settings));
|
||||
});
|
||||
}
|
||||
|
||||
private static Mac createMac(Method method, SecureString password, byte[] salt, int iterations) {
|
||||
try {
|
||||
SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2With" + method.getAlgorithm());
|
||||
PBEKeySpec keySpec = new PBEKeySpec(password.getChars(), salt, iterations, 128);
|
||||
byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded();
|
||||
Mac mac = Mac.getInstance(method.getAlgorithm());
|
||||
mac.init(new SecretKeySpec(pbkdf2, method.getAlgorithm()));
|
||||
return mac;
|
||||
} catch (NoSuchAlgorithmException | InvalidKeySpecException | InvalidKeyException e) {
|
||||
throw new IllegalArgumentException("invalid settings", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public HashProcessor create(Map<String, Processor.Factory> registry, String processorTag, Map<String, Object> config) {
|
||||
boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
|
||||
List<String> fields = ConfigurationUtils.readList(TYPE, processorTag, config, "fields");
|
||||
if (fields.isEmpty()) {
|
||||
throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "fields", "must specify at least one field");
|
||||
} else if (fields.stream().anyMatch(Strings::isNullOrEmpty)) {
|
||||
throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "fields",
|
||||
"a field-name entry is either empty or null");
|
||||
}
|
||||
String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field");
|
||||
String keySettingName = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "key_setting");
|
||||
SecureString key = secureKeys.get(keySettingName);
|
||||
if (key == null) {
|
||||
throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "key_setting",
|
||||
"key [" + keySettingName + "] must match [xpack.security.ingest.hash.*.key]. It is not set");
|
||||
}
|
||||
String saltString = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "salt");
|
||||
byte[] salt = saltString.getBytes(StandardCharsets.UTF_8);
|
||||
String methodProperty = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "method", "SHA256");
|
||||
Method method = Method.fromString(processorTag, "method", methodProperty);
|
||||
int iterations = ConfigurationUtils.readIntProperty(TYPE, processorTag, config, "iterations", 5);
|
||||
Mac mac = createMac(method, key, salt, iterations);
|
||||
return new HashProcessor(processorTag, fields, targetField, salt, method, mac, ignoreMissing);
|
||||
}
|
||||
}
|
||||
|
||||
enum Method {
|
||||
SHA1("HmacSHA1"),
|
||||
SHA256("HmacSHA256"),
|
||||
SHA384("HmacSHA384"),
|
||||
SHA512("HmacSHA512");
|
||||
|
||||
private final String algorithm;
|
||||
|
||||
Method(String algorithm) {
|
||||
this.algorithm = algorithm;
|
||||
}
|
||||
|
||||
public String getAlgorithm() {
|
||||
return algorithm;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
|
||||
public String hash(Mac mac, byte[] salt, String input) {
|
||||
try {
|
||||
byte[] encrypted = mac.doFinal(input.getBytes(StandardCharsets.UTF_8));
|
||||
byte[] messageWithSalt = new byte[salt.length + encrypted.length];
|
||||
System.arraycopy(salt, 0, messageWithSalt, 0, salt.length);
|
||||
System.arraycopy(encrypted, 0, messageWithSalt, salt.length, encrypted.length);
|
||||
return Base64.getEncoder().encodeToString(messageWithSalt);
|
||||
} catch (IllegalStateException e) {
|
||||
throw new ElasticsearchException("error hashing data", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static Method fromString(String processorTag, String propertyName, String type) {
|
||||
try {
|
||||
return Method.valueOf(type.toUpperCase(Locale.ROOT));
|
||||
} catch(IllegalArgumentException e) {
|
||||
throw newConfigurationException(TYPE, processorTag, propertyName, "type [" + type +
|
||||
"] not supported, cannot convert field. Valid hash methods: " + Arrays.toString(Method.values()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -153,6 +153,7 @@ public class TransportChangePasswordActionTests extends ESTestCase {
|
|||
verify(usersStore, times(1)).changePassword(eq(request), any(ActionListener.class));
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31696")
|
||||
public void testIncorrectPasswordHashingAlgorithm() {
|
||||
final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe"));
|
||||
final Hasher hasher = Hasher.resolve(randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt9", "bcrypt5"));
|
||||
|
|
|
@ -84,6 +84,7 @@ public class FileRealmTests extends ESTestCase {
|
|||
assertThat(user.roles(), arrayContaining("role1", "role2"));
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31697")
|
||||
public void testAuthenticateCaching() throws Exception {
|
||||
Settings settings = Settings.builder()
|
||||
.put("cache.hash_algo", Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT)).build();
|
||||
|
|
|
@ -0,0 +1,136 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.security.ingest;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.settings.MockSecureSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class HashProcessorFactoryTests extends ESTestCase {
|
||||
|
||||
public void testProcessor() {
|
||||
MockSecureSettings mockSecureSettings = new MockSecureSettings();
|
||||
mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key");
|
||||
Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("fields", Collections.singletonList("_field"));
|
||||
config.put("target_field", "_target");
|
||||
config.put("salt", "_salt");
|
||||
config.put("key_setting", "xpack.security.ingest.hash.processor.key");
|
||||
for (HashProcessor.Method method : HashProcessor.Method.values()) {
|
||||
config.put("method", method.toString());
|
||||
HashProcessor processor = factory.create(null, "_tag", new HashMap<>(config));
|
||||
assertThat(processor.getFields(), equalTo(Collections.singletonList("_field")));
|
||||
assertThat(processor.getTargetField(), equalTo("_target"));
|
||||
assertArrayEquals(processor.getSalt(), "_salt".getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
}
|
||||
|
||||
public void testProcessorNoFields() {
|
||||
MockSecureSettings mockSecureSettings = new MockSecureSettings();
|
||||
mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key");
|
||||
Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("target_field", "_target");
|
||||
config.put("salt", "_salt");
|
||||
config.put("key_setting", "xpack.security.ingest.hash.processor.key");
|
||||
config.put("method", HashProcessor.Method.SHA1.toString());
|
||||
ElasticsearchException e = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), equalTo("[fields] required property is missing"));
|
||||
}
|
||||
|
||||
public void testProcessorNoTargetField() {
|
||||
MockSecureSettings mockSecureSettings = new MockSecureSettings();
|
||||
mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key");
|
||||
Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("fields", Collections.singletonList("_field"));
|
||||
config.put("salt", "_salt");
|
||||
config.put("key_setting", "xpack.security.ingest.hash.processor.key");
|
||||
config.put("method", HashProcessor.Method.SHA1.toString());
|
||||
ElasticsearchException e = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
|
||||
}
|
||||
|
||||
public void testProcessorFieldsIsEmpty() {
|
||||
MockSecureSettings mockSecureSettings = new MockSecureSettings();
|
||||
mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key");
|
||||
Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("fields", Collections.singletonList(randomBoolean() ? "" : null));
|
||||
config.put("salt", "_salt");
|
||||
config.put("target_field", "_target");
|
||||
config.put("key_setting", "xpack.security.ingest.hash.processor.key");
|
||||
config.put("method", HashProcessor.Method.SHA1.toString());
|
||||
ElasticsearchException e = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), equalTo("[fields] a field-name entry is either empty or null"));
|
||||
}
|
||||
|
||||
public void testProcessorMissingSalt() {
|
||||
MockSecureSettings mockSecureSettings = new MockSecureSettings();
|
||||
mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key");
|
||||
Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("fields", Collections.singletonList("_field"));
|
||||
config.put("target_field", "_target");
|
||||
config.put("key_setting", "xpack.security.ingest.hash.processor.key");
|
||||
ElasticsearchException e = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), equalTo("[salt] required property is missing"));
|
||||
}
|
||||
|
||||
public void testProcessorInvalidMethod() {
|
||||
MockSecureSettings mockSecureSettings = new MockSecureSettings();
|
||||
mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key");
|
||||
Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("fields", Collections.singletonList("_field"));
|
||||
config.put("salt", "_salt");
|
||||
config.put("target_field", "_target");
|
||||
config.put("key_setting", "xpack.security.ingest.hash.processor.key");
|
||||
config.put("method", "invalid");
|
||||
ElasticsearchException e = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", config));
|
||||
assertThat(e.getMessage(), equalTo("[method] type [invalid] not supported, cannot convert field. " +
|
||||
"Valid hash methods: [sha1, sha256, sha384, sha512]"));
|
||||
}
|
||||
|
||||
public void testProcessorInvalidOrMissingKeySetting() {
|
||||
Settings settings = Settings.builder().setSecureSettings(new MockSecureSettings()).build();
|
||||
HashProcessor.Factory factory = new HashProcessor.Factory(settings);
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("fields", Collections.singletonList("_field"));
|
||||
config.put("salt", "_salt");
|
||||
config.put("target_field", "_target");
|
||||
config.put("key_setting", "invalid");
|
||||
config.put("method", HashProcessor.Method.SHA1.toString());
|
||||
ElasticsearchException e = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", new HashMap<>(config)));
|
||||
assertThat(e.getMessage(),
|
||||
equalTo("[key_setting] key [invalid] must match [xpack.security.ingest.hash.*.key]. It is not set"));
|
||||
config.remove("key_setting");
|
||||
ElasticsearchException ex = expectThrows(ElasticsearchException.class,
|
||||
() -> factory.create(null, "_tag", config));
|
||||
assertThat(ex.getMessage(), equalTo("[key_setting] required property is missing"));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.security.ingest;
|
||||
|
||||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.security.ingest.HashProcessor.Method;
|
||||
|
||||
import javax.crypto.Mac;
|
||||
import javax.crypto.SecretKeyFactory;
|
||||
import javax.crypto.spec.PBEKeySpec;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
 * Unit tests for {@code HashProcessor}: field hashing with an HMAC derived
 * via PBKDF2, salt-prefixed Base64 output, and the ignore_missing flag.
 */
public class HashProcessorTests extends ESTestCase {

    @SuppressWarnings("unchecked")
    public void testIgnoreMissing() throws Exception {
        Method method = randomFrom(Method.values());
        Mac mac = createMac(method);
        // Only "one" exists in the document; "two" is deliberately absent.
        Map<String, Object> fields = new HashMap<>();
        fields.put("one", "foo");
        // ignoreMissing=true: the absent field "two" is silently skipped.
        HashProcessor processor = new HashProcessor("_tag", Arrays.asList("one", "two"),
            "target", "_salt".getBytes(StandardCharsets.UTF_8), Method.SHA1, mac, true);
        IngestDocument ingestDocument = new IngestDocument(fields, new HashMap<>());
        processor.execute(ingestDocument);
        // Only the present field produced an entry in the target map.
        Map<String, String> target = ingestDocument.getFieldValue("target", Map.class);
        assertThat(target.size(), equalTo(1));
        assertNotNull(target.get("one"));

        // ignoreMissing=false: the same absent field now raises an error.
        HashProcessor failProcessor = new HashProcessor("_tag", Arrays.asList("one", "two"),
            "target", "_salt".getBytes(StandardCharsets.UTF_8), Method.SHA1, mac, false);
        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> failProcessor.execute(ingestDocument));
        assertThat(exception.getMessage(), equalTo("field [two] not present as part of path [two]"));
    }

    public void testStaticKeyAndSalt() throws Exception {
        // Fixed key/salt/iterations so the Base64 output is a known constant
        // (kept in sync with the YAML REST test for the hash processor).
        byte[] salt = "_salt".getBytes(StandardCharsets.UTF_8);
        SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
        PBEKeySpec keySpec = new PBEKeySpec("hmackey".toCharArray(), salt, 5, 128);
        byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded();
        Mac mac = Mac.getInstance(Method.SHA1.getAlgorithm());
        mac.init(new SecretKeySpec(pbkdf2, Method.SHA1.getAlgorithm()));
        Map<String, Object> fields = new HashMap<>();
        fields.put("field", "0123456789");
        HashProcessor processor = new HashProcessor("_tag", Collections.singletonList("field"),
            "target", salt, Method.SHA1, mac, false);
        IngestDocument ingestDocument = new IngestDocument(fields, new HashMap<>());
        processor.execute(ingestDocument);
        assertThat(ingestDocument.getFieldValue("target", String.class), equalTo("X3NhbHQMW0oHJGEEE9obGcGv5tGd7HFyDw=="));
    }

    public void testProcessorSingleField() throws Exception {
        // A single source field: the target field holds the hash directly
        // (as a String, not a map).
        List<String> fields = Collections.singletonList(randomAlphaOfLength(6));
        Map<String, Object> docFields = new HashMap<>();
        for (String field : fields) {
            docFields.put(field, randomAlphaOfLengthBetween(2, 10));
        }

        String targetField = randomAlphaOfLength(6);
        Method method = randomFrom(Method.values());
        Mac mac = createMac(method);
        byte[] salt = randomByteArrayOfLength(5);
        HashProcessor processor = new HashProcessor("_tag", fields, targetField, salt, method, mac, false);
        IngestDocument ingestDocument = new IngestDocument(docFields, new HashMap<>());
        processor.execute(ingestDocument);

        // The stored value matches an independent Method.hash computation...
        String targetFieldValue = ingestDocument.getFieldValue(targetField, String.class);
        Object expectedTargetFieldValue = method.hash(mac, salt, ingestDocument.getFieldValue(fields.get(0), String.class));
        assertThat(targetFieldValue, equalTo(expectedTargetFieldValue));
        // ...and the decoded bytes start with the salt prefix.
        byte[] bytes = Base64.getDecoder().decode(targetFieldValue);
        byte[] actualSaltPrefix = new byte[salt.length];
        System.arraycopy(bytes, 0, actualSaltPrefix, 0, salt.length);
        assertArrayEquals(salt, actualSaltPrefix);
    }

    @SuppressWarnings("unchecked")
    public void testProcessorMultipleFields() throws Exception {
        // Multiple source fields: the target field becomes a map of
        // field name -> salted hash.
        List<String> fields = new ArrayList<>();
        for (int i = 0; i < randomIntBetween(2, 10); i++) {
            fields.add(randomAlphaOfLength(5 + i));
        }
        Map<String, Object> docFields = new HashMap<>();
        for (String field : fields) {
            docFields.put(field, randomAlphaOfLengthBetween(2, 10));
        }

        String targetField = randomAlphaOfLength(6);
        Method method = randomFrom(Method.values());
        Mac mac = createMac(method);
        byte[] salt = randomByteArrayOfLength(5);
        HashProcessor processor = new HashProcessor("_tag", fields, targetField, salt, method, mac, false);
        IngestDocument ingestDocument = new IngestDocument(docFields, new HashMap<>());
        processor.execute(ingestDocument);

        // Every entry matches an independent computation and carries the
        // salt as the prefix of its decoded bytes.
        Map<String, String> targetFieldMap = ingestDocument.getFieldValue(targetField, Map.class);
        for (Map.Entry<String, String> entry : targetFieldMap.entrySet()) {
            Object expectedTargetFieldValue = method.hash(mac, salt, ingestDocument.getFieldValue(entry.getKey(), String.class));
            assertThat(entry.getValue(), equalTo(expectedTargetFieldValue));
            byte[] bytes = Base64.getDecoder().decode(entry.getValue());
            byte[] actualSaltPrefix = new byte[salt.length];
            System.arraycopy(bytes, 0, actualSaltPrefix, 0, salt.length);
            assertArrayEquals(salt, actualSaltPrefix);
        }
    }

    /**
     * Builds an initialized {@link Mac} for the given method by deriving a
     * 128-bit key with PBKDF2 from random password/salt/iteration inputs.
     */
    private Mac createMac(Method method) throws Exception {
        char[] password = randomAlphaOfLengthBetween(1, 10).toCharArray();
        byte[] salt = randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8);
        int iterations = randomIntBetween(1, 10);
        SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2With" + method.getAlgorithm());
        PBEKeySpec keySpec = new PBEKeySpec(password, salt, iterations, 128);
        byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded();
        Mac mac = Mac.getInstance(method.getAlgorithm());
        mac.init(new SecretKeySpec(pbkdf2, method.getAlgorithm()));
        return mac;
    }
}
|
|
@ -6,7 +6,7 @@ buildscript {
|
|||
}
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2'
|
||||
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,51 @@
|
|||
---
# Remove the pipeline created by the test; ignore 404 if it was never created.
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test Hash Processor":

  - do:
      cluster.health:
        wait_for_status: yellow
  # Register a pipeline that hashes user_ssid into "anonymized" using the
  # HMAC key stored under the xpack.security.ingest.hash.processor.key
  # secure setting, with a fixed salt and iteration count.
  - do:
      ingest.put_pipeline:
        id: "my_pipeline"
        body:  >
          {
            "processors": [
              {
                "hash" : {
                  "fields" : ["user_ssid"],
                  "target_field" : "anonymized",
                  "salt": "_salt",
                  "iterations": 5,
                  "method": "sha1",
                  "key_setting": "xpack.security.ingest.hash.processor.key"
                }
              }
            ]
          }
  - match: { acknowledged: true }

  # Index a document through the pipeline so the processor runs on ingest.
  - do:
      index:
        index: test
        type: test
        id: 1
        pipeline: "my_pipeline"
        body: >
          {
            "user_ssid": "0123456789"
          }

  # The stored document carries the salt-prefixed, Base64-encoded hash
  # (same constant as HashProcessorTests#testStaticKeyAndSalt).
  - do:
      get:
        index: test
        type: test
        id: 1
  - match: { _source.anonymized: "X3NhbHQMW0oHJGEEE9obGcGv5tGd7HFyDw==" }
|
||||
|
Loading…
Reference in New Issue