Merge branch 'master' into index-lifecycle

This commit is contained in:
Colin Goodheart-Smithe 2018-07-25 10:39:14 +01:00
commit 81faa2fe6b
No known key found for this signature in database
GPG Key ID: F975E7BDD739B3C7
27 changed files with 736 additions and 304 deletions

View File

@ -7,5 +7,6 @@
ES_RUNTIME_JAVA:
- java8
- java8fips
- java10
- java11

View File

@ -16,21 +16,17 @@
* specific language governing permissions and limitations
* under the License.
*/
import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import org.apache.tools.ant.taskdefs.condition.Os
import org.apache.tools.ant.filters.ReplaceTokens
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionCollection
import org.elasticsearch.gradle.VersionProperties
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.gradle.api.tasks.wrapper.Wrapper
import org.gradle.api.tasks.wrapper.Wrapper.DistributionType
import org.gradle.util.GradleVersion
import org.gradle.util.DistributionLocator
import org.apache.tools.ant.taskdefs.condition.Os
import org.apache.tools.ant.filters.ReplaceTokens
import java.nio.file.Files
import java.nio.file.Path
@ -562,7 +558,7 @@ task run(type: Run) {
}
wrapper {
distributionType = DistributionType.ALL
distributionType = 'ALL'
doLast {
final DistributionLocator locator = new DistributionLocator()
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
@ -571,6 +567,10 @@ wrapper {
final String sha256Sum = new String(sha256Uri.toURL().bytes)
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
println "Added checksum to wrapper properties"
// Update build-tools to reflect the Gradle upgrade
// TODO: we can remove this once we have tests to make sure older versions work.
project(':build-tools').file('src/main/resources/minimumGradleVersion').text = gradleVersion
println "Updated minimum Gradle Version"
}
}

View File

@ -25,8 +25,9 @@ plugins {
group = 'org.elasticsearch.gradle'
if (GradleVersion.current() < GradleVersion.version('4.9')) {
throw new GradleException('Gradle 4.9+ is required to build elasticsearch')
String minimumGradleVersion = file('src/main/resources/minimumGradleVersion').text.trim()
if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion)) {
throw new GradleException("Gradle ${minimumGradleVersion}+ is required to build elasticsearch")
}
if (JavaVersion.current() < JavaVersion.VERSION_1_8) {

View File

@ -20,6 +20,7 @@ package org.elasticsearch.gradle
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import org.apache.commons.io.IOUtils
import org.apache.tools.ant.taskdefs.condition.Os
import org.eclipse.jgit.lib.Constants
import org.eclipse.jgit.lib.RepositoryBuilder
@ -53,6 +54,7 @@ import org.gradle.internal.jvm.Jvm
import org.gradle.process.ExecResult
import org.gradle.util.GradleVersion
import java.nio.charset.StandardCharsets
import java.time.ZoneOffset
import java.time.ZonedDateTime
/**
@ -67,8 +69,13 @@ class BuildPlugin implements Plugin<Project> {
+ 'elasticearch.standalone-rest-test, and elasticsearch.build '
+ 'are mutually exclusive')
}
if (GradleVersion.current() < GradleVersion.version('4.9')) {
throw new GradleException('Gradle 4.9+ is required to use elasticsearch.build plugin')
final String minimumGradleVersion
InputStream is = getClass().getResourceAsStream("/minimumGradleVersion")
try { minimumGradleVersion = IOUtils.toString(is, StandardCharsets.UTF_8.toString()) } finally { is.close() }
if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion.trim())) {
throw new GradleException(
"Gradle ${minimumGradleVersion}+ is required to use elasticsearch.build plugin"
)
}
project.pluginManager.apply('java')
project.pluginManager.apply('carrotsearch.randomized-testing')
@ -153,14 +160,6 @@ class BuildPlugin implements Plugin<Project> {
}
println " Random Testing Seed : ${project.testSeed}"
// enforce Gradle version
final GradleVersion currentGradleVersion = GradleVersion.current();
final GradleVersion minGradle = GradleVersion.version('4.3')
if (currentGradleVersion < minGradle) {
throw new GradleException("${minGradle} or above is required to build Elasticsearch")
}
// enforce Java version
if (compilerJavaVersionEnum < minimumCompilerVersion) {
final String message =

View File

@ -0,0 +1 @@
4.9

View File

@ -1,6 +1,11 @@
package org.elasticsearch.gradle.test;
import org.gradle.testkit.runner.GradleRunner;
import java.io.File;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
@ -13,4 +18,47 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
return new File(root, name);
}
protected GradleRunner getGradleRunner(String sampleProject) {
return GradleRunner.create()
.withProjectDir(getProjectDir(sampleProject))
.withPluginClasspath();
}
protected File getBuildDir(String name) {
return new File(getProjectDir(name), "build");
}
protected void assertOutputContains(String output, String... lines) {
for (String line : lines) {
assertOutputContains(output, line);
}
List<Integer> index = Stream.of(lines).map(line -> output.indexOf(line)).collect(Collectors.toList());
if (index.equals(index.stream().sorted().collect(Collectors.toList())) == false) {
fail("Expected the following lines to appear in this order:\n" +
Stream.of(lines).map(line -> " - `" + line + "`").collect(Collectors.joining("\n")) +
"\nBut they did not. Output is:\n\n```" + output + "\n```\n"
);
}
}
protected void assertOutputContains(String output, String line) {
assertTrue(
"Expected the following line in output:\n\n" + line + "\n\nOutput is:\n" + output,
output.contains(line)
);
}
protected void assertOutputDoesNotContain(String output, String line) {
assertFalse(
"Expected the following line not to be in output:\n\n" + line + "\n\nOutput is:\n" + output,
output.contains(line)
);
}
protected void assertOutputDoesNotContain(String output, String... lines) {
for (String line : lines) {
assertOutputDoesNotContain(line);
}
}
}

View File

@ -35,8 +35,8 @@ include-tagged::{doc-tests}/LicensingDocumentationIT.java[put-license-response]
<2> Make sure that the license is valid.
<3> Check the acknowledge flag.
<4> It should be true if license is acknowledge.
<5> Otherwise we can see the acknowledge messages in `acknowledgeHeader()`
<6> and check component-specific messages in `acknowledgeMessages()`.
<5> Otherwise we can see the acknowledge messages in `acknowledgeHeader()` and check
component-specific messages in `acknowledgeMessages()`.
[[java-rest-high-put-license-async]]
==== Asynchronous Execution

Binary file not shown.

View File

@ -42,7 +42,11 @@ public final class ConvertProcessor extends AbstractProcessor {
@Override
public Object convert(Object value) {
try {
return Integer.parseInt(value.toString());
String strValue = value.toString();
if (strValue.startsWith("0x") || strValue.startsWith("-0x")) {
return Integer.decode(strValue);
}
return Integer.parseInt(strValue);
} catch(NumberFormatException e) {
throw new IllegalArgumentException("unable to convert [" + value + "] to integer", e);
}
@ -52,7 +56,11 @@ public final class ConvertProcessor extends AbstractProcessor {
@Override
public Object convert(Object value) {
try {
return Long.parseLong(value.toString());
String strValue = value.toString();
if (strValue.startsWith("0x") || strValue.startsWith("-0x")) {
return Long.decode(strValue);
}
return Long.parseLong(strValue);
} catch(NumberFormatException e) {
throw new IllegalArgumentException("unable to convert [" + value + "] to long", e);
}

View File

@ -49,6 +49,33 @@ public class ConvertProcessorTests extends ESTestCase {
assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt));
}
public void testConvertIntHex() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
int randomInt = randomInt();
String intString = randomInt < 0 ? "-0x" + Integer.toHexString(-randomInt) : "0x" + Integer.toHexString(randomInt);
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, intString);
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt));
}
public void testConvertIntLeadingZero() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "010");
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(10));
}
public void testConvertIntHexError() {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String value = "0x" + randomAlphaOfLengthBetween(1, 10);
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value);
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to integer"));
}
public void testConvertIntList() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
int numItems = randomIntBetween(1, 10);
@ -92,6 +119,33 @@ public class ConvertProcessorTests extends ESTestCase {
assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(randomLong));
}
public void testConvertLongHex() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
long randomLong = randomLong();
String longString = randomLong < 0 ? "-0x" + Long.toHexString(-randomLong) : "0x" + Long.toHexString(randomLong);
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, longString);
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(randomLong));
}
public void testConvertLongLeadingZero() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "010");
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(fieldName, Long.class), equalTo(10L));
}
public void testConvertLongHexError() {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String value = "0x" + randomAlphaOfLengthBetween(1, 10);
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, value);
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.LONG, false);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to long"));
}
public void testConvertLongList() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
int numItems = randomIntBetween(1, 10);

View File

@ -30,30 +30,30 @@ import java.util.Map;
public final class PainlessLookup {
public Collection<PainlessClass> getStructs() {
return javaClassesToPainlessStructs.values();
return classesToPainlessClasses.values();
}
private final Map<String, Class<?>> painlessTypesToJavaClasses;
private final Map<Class<?>, PainlessClass> javaClassesToPainlessStructs;
private final Map<String, Class<?>> canonicalClassNamesToClasses;
private final Map<Class<?>, PainlessClass> classesToPainlessClasses;
PainlessLookup(Map<String, Class<?>> painlessTypesToJavaClasses, Map<Class<?>, PainlessClass> javaClassesToPainlessStructs) {
this.painlessTypesToJavaClasses = Collections.unmodifiableMap(painlessTypesToJavaClasses);
this.javaClassesToPainlessStructs = Collections.unmodifiableMap(javaClassesToPainlessStructs);
PainlessLookup(Map<String, Class<?>> canonicalClassNamesToClasses, Map<Class<?>, PainlessClass> classesToPainlessClasses) {
this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses);
this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses);
}
public Class<?> getClassFromBinaryName(String painlessType) {
return painlessTypesToJavaClasses.get(painlessType.replace('$', '.'));
return canonicalClassNamesToClasses.get(painlessType.replace('$', '.'));
}
public boolean isSimplePainlessType(String painlessType) {
return painlessTypesToJavaClasses.containsKey(painlessType);
return canonicalClassNamesToClasses.containsKey(painlessType);
}
public PainlessClass getPainlessStructFromJavaClass(Class<?> clazz) {
return javaClassesToPainlessStructs.get(clazz);
return classesToPainlessClasses.get(clazz);
}
public Class<?> getJavaClassFromPainlessType(String painlessType) {
return PainlessLookupUtility.canonicalTypeNameToType(painlessType, painlessTypesToJavaClasses);
return PainlessLookupUtility.canonicalTypeNameToType(painlessType, canonicalClassNamesToClasses);
}
}

View File

@ -37,7 +37,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Stack;
import java.util.regex.Pattern;
import static org.elasticsearch.painless.lookup.PainlessLookupUtility.CONSTRUCTOR_NAME;
@ -666,179 +665,6 @@ public class PainlessLookupBuilder {
}
}
private void copyStruct(String struct, List<String> children) {
final PainlessClassBuilder owner = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(struct));
if (owner == null) {
throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
}
for (int count = 0; count < children.size(); ++count) {
final PainlessClassBuilder child =
classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(children.get(count)));
if (child == null) {
throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
" not defined for copy to owner struct [" + owner.name + "].");
}
if (!child.clazz.isAssignableFrom(owner.clazz)) {
throw new ClassCastException("Child struct [" + child.name + "]" +
" is not a super type of owner struct [" + owner.name + "] in copy.");
}
for (Map.Entry<String,PainlessMethod> kvPair : child.methods.entrySet()) {
String methodKey = kvPair.getKey();
PainlessMethod method = kvPair.getValue();
if (owner.methods.get(methodKey) == null) {
// TODO: some of these are no longer valid or outright don't work
// TODO: since classes may not come from the Painless classloader
// TODO: and it was dependent on the order of the extends which
// TODO: which no longer exists since this is generated automatically
// sanity check, look for missing covariant/generic override
/*if (owner.clazz.isInterface() && child.clazz == Object.class) {
// ok
} else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
// ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!)
} else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
// ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
// https://bugs.openjdk.java.net/browse/JDK-8072746
} else {
try {
// TODO: we *have* to remove all these public members and use getter methods to encapsulate!
final Class<?> impl;
final Class<?> arguments[];
if (method.augmentation != null) {
impl = method.augmentation;
arguments = new Class<?>[method.arguments.size() + 1];
arguments[0] = method.owner.clazz;
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i + 1] = method.arguments.get(i).clazz;
}
} else {
impl = owner.clazz;
arguments = new Class<?>[method.arguments.size()];
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i] = method.arguments.get(i).clazz;
}
}
java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
if (m.getReturnType() != method.rtn.clazz) {
throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
}
if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
// its a bridge in the destination, but not in the source, but it might still be ok, check generics:
java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
}
}
} catch (ReflectiveOperationException e) {
throw new AssertionError(e);
}
}*/
owner.methods.put(methodKey, method);
}
}
for (PainlessField field : child.members.values()) {
if (owner.members.get(field.name) == null) {
owner.members.put(field.name, new PainlessField(
field.name, field.javaName, owner.clazz, field.clazz, field.modifiers, field.getter, field.setter));
}
}
}
}
/**
* Precomputes a more efficient structure for dynamic method/field access.
*/
private void addRuntimeClass(final PainlessClassBuilder struct) {
// add all getters/setters
for (Map.Entry<String, PainlessMethod> method : struct.methods.entrySet()) {
String name = method.getValue().name;
PainlessMethod m = method.getValue();
if (m.arguments.size() == 0 &&
name.startsWith("get") &&
name.length() > 3 &&
Character.isUpperCase(name.charAt(3))) {
StringBuilder newName = new StringBuilder();
newName.append(Character.toLowerCase(name.charAt(3)));
newName.append(name.substring(4));
struct.getters.putIfAbsent(newName.toString(), m.handle);
} else if (m.arguments.size() == 0 &&
name.startsWith("is") &&
name.length() > 2 &&
Character.isUpperCase(name.charAt(2))) {
StringBuilder newName = new StringBuilder();
newName.append(Character.toLowerCase(name.charAt(2)));
newName.append(name.substring(3));
struct.getters.putIfAbsent(newName.toString(), m.handle);
}
if (m.arguments.size() == 1 &&
name.startsWith("set") &&
name.length() > 3 &&
Character.isUpperCase(name.charAt(3))) {
StringBuilder newName = new StringBuilder();
newName.append(Character.toLowerCase(name.charAt(3)));
newName.append(name.substring(4));
struct.setters.putIfAbsent(newName.toString(), m.handle);
}
}
// add all members
for (Map.Entry<String, PainlessField> member : struct.members.entrySet()) {
struct.getters.put(member.getKey(), member.getValue().getter);
struct.setters.put(member.getKey(), member.getValue().setter);
}
}
/** computes the functional interface method for a class, or returns null */
private PainlessMethod computeFunctionalInterfaceMethod(PainlessClassBuilder clazz) {
if (!clazz.clazz.isInterface()) {
return null;
}
// if its marked with this annotation, we fail if the conditions don't hold (means whitelist bug)
// otherwise, this annotation is pretty useless.
boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);
List<java.lang.reflect.Method> methods = new ArrayList<>();
for (java.lang.reflect.Method m : clazz.clazz.getMethods()) {
// default interface methods don't count
if (m.isDefault()) {
continue;
}
// static methods don't count
if (Modifier.isStatic(m.getModifiers())) {
continue;
}
// if its from Object, it doesn't count
try {
Object.class.getMethod(m.getName(), m.getParameterTypes());
continue;
} catch (ReflectiveOperationException e) {
// it counts
}
methods.add(m);
}
if (methods.size() != 1) {
if (hasAnnotation) {
throw new IllegalArgumentException("Class: " + clazz.name +
" is marked with FunctionalInterface but doesn't fit the bill: " + methods);
}
return null;
}
// inspect the one method found from the reflection API, it should match the whitelist!
java.lang.reflect.Method oneMethod = methods.get(0);
PainlessMethod painless = clazz.methods.get(buildPainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));
if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
"method is not whitelisted!");
}
return painless;
}
public PainlessLookup build() {
String origin = "internal error";
@ -849,11 +675,11 @@ public class PainlessLookupBuilder {
for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
PainlessClassBuilder painlessStruct =
classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName));
classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName));
if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
"[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
"[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
}
origin = whitelistStruct.origin;
@ -894,78 +720,144 @@ public class PainlessLookupBuilder {
throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
}
// goes through each Painless struct and determines the inheritance list,
// and then adds all inherited types to the Painless struct's whitelist
copyPainlessClassMembers();
cacheRuntimeHandles();
setFunctionalInterfaceMethods();
Map<Class<?>, PainlessClass> classesToPainlessClasses = new HashMap<>(classesToPainlessClassBuilders.size());
for (Map.Entry<Class<?>, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) {
classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build());
}
return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses);
}
private void copyPainlessClassMembers() {
for (Class<?> parentClass : classesToPainlessClassBuilders.keySet()) {
copyPainlessInterfaceMembers(parentClass, parentClass);
Class<?> childClass = parentClass.getSuperclass();
while (childClass != null) {
if (classesToPainlessClassBuilders.containsKey(childClass)) {
copyPainlessClassMembers(childClass, parentClass);
}
copyPainlessInterfaceMembers(childClass, parentClass);
childClass = childClass.getSuperclass();
}
}
for (Class<?> javaClass : classesToPainlessClassBuilders.keySet()) {
PainlessClassBuilder painlessStruct = classesToPainlessClassBuilders.get(javaClass);
if (javaClass.isInterface()) {
copyPainlessClassMembers(Object.class, javaClass);
}
}
}
List<String> painlessSuperStructs = new ArrayList<>();
Class<?> javaSuperClass = painlessStruct.clazz.getSuperclass();
Stack<Class<?>> javaInteraceLookups = new Stack<>();
javaInteraceLookups.push(painlessStruct.clazz);
// adds super classes to the inheritance list
if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
while (javaSuperClass != null) {
PainlessClassBuilder painlessSuperStruct = classesToPainlessClassBuilders.get(javaSuperClass);
if (painlessSuperStruct != null) {
painlessSuperStructs.add(painlessSuperStruct.name);
}
javaInteraceLookups.push(javaSuperClass);
javaSuperClass = javaSuperClass.getSuperclass();
}
private void copyPainlessInterfaceMembers(Class<?> parentClass, Class<?> targetClass) {
for (Class<?> childClass : parentClass.getInterfaces()) {
if (classesToPainlessClassBuilders.containsKey(childClass)) {
copyPainlessClassMembers(childClass, targetClass);
}
// adds all super interfaces to the inheritance list
while (javaInteraceLookups.isEmpty() == false) {
Class<?> javaInterfaceLookup = javaInteraceLookups.pop();
copyPainlessInterfaceMembers(childClass, targetClass);
}
}
for (Class<?> javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClassBuilders.get(javaSuperInterface);
private void copyPainlessClassMembers(Class<?> originalClass, Class<?> targetClass) {
PainlessClassBuilder originalPainlessClassBuilder = classesToPainlessClassBuilders.get(originalClass);
PainlessClassBuilder targetPainlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);
if (painlessInterfaceStruct != null) {
String painlessInterfaceStructName = painlessInterfaceStruct.name;
Objects.requireNonNull(originalPainlessClassBuilder);
Objects.requireNonNull(targetPainlessClassBuilder);
if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
painlessSuperStructs.add(painlessInterfaceStructName);
}
for (Map.Entry<String, PainlessMethod> painlessMethodEntry : originalPainlessClassBuilder.methods.entrySet()) {
String painlessMethodKey = painlessMethodEntry.getKey();
PainlessMethod newPainlessMethod = painlessMethodEntry.getValue();
PainlessMethod existingPainlessMethod = targetPainlessClassBuilder.methods.get(painlessMethodKey);
for (Class<?> javaPushInterface : javaInterfaceLookup.getInterfaces()) {
javaInteraceLookups.push(javaPushInterface);
}
if (existingPainlessMethod == null || existingPainlessMethod.target != newPainlessMethod.target &&
existingPainlessMethod.target.isAssignableFrom(newPainlessMethod.target)) {
targetPainlessClassBuilder.methods.put(painlessMethodKey, newPainlessMethod);
}
}
for (Map.Entry<String, PainlessField> painlessFieldEntry : originalPainlessClassBuilder.members.entrySet()) {
String painlessFieldKey = painlessFieldEntry.getKey();
PainlessField newPainlessField = painlessFieldEntry.getValue();
PainlessField existingPainlessField = targetPainlessClassBuilder.members.get(painlessFieldKey);
if (existingPainlessField == null || existingPainlessField.target != newPainlessField.target &&
existingPainlessField.target.isAssignableFrom(newPainlessField.target)) {
targetPainlessClassBuilder.members.put(painlessFieldKey, newPainlessField);
}
}
}
private void cacheRuntimeHandles() {
for (PainlessClassBuilder painlessClassBuilder : classesToPainlessClassBuilders.values()) {
cacheRuntimeHandles(painlessClassBuilder);
}
}
private void cacheRuntimeHandles(PainlessClassBuilder painlessClassBuilder) {
for (PainlessMethod painlessMethod : painlessClassBuilder.methods.values()) {
String methodName = painlessMethod.name;
int typeParametersSize = painlessMethod.arguments.size();
if (typeParametersSize == 0 && methodName.startsWith("get") && methodName.length() > 3 &&
Character.isUpperCase(methodName.charAt(3))) {
painlessClassBuilder.getters.putIfAbsent(
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle);
} else if (typeParametersSize == 0 && methodName.startsWith("is") && methodName.length() > 2 &&
Character.isUpperCase(methodName.charAt(2))) {
painlessClassBuilder.getters.putIfAbsent(
Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), painlessMethod.handle);
} else if (typeParametersSize == 1 && methodName.startsWith("set") && methodName.length() > 3 &&
Character.isUpperCase(methodName.charAt(3))) {
painlessClassBuilder.setters.putIfAbsent(
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle);
}
}
for (PainlessField painlessField : painlessClassBuilder.members.values()) {
painlessClassBuilder.getters.put(painlessField.name, painlessField.getter);
painlessClassBuilder.setters.put(painlessField.name, painlessField.setter);
}
}
private void setFunctionalInterfaceMethods() {
for (Map.Entry<Class<?>, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) {
setFunctionalInterfaceMethod(painlessClassBuilderEntry.getValue());
}
}
private void setFunctionalInterfaceMethod(PainlessClassBuilder painlessClassBuilder) {
Class<?> targetClass = painlessClassBuilder.clazz;
if (targetClass.isInterface()) {
List<java.lang.reflect.Method> javaMethods = new ArrayList<>();
for (java.lang.reflect.Method javaMethod : targetClass.getMethods()) {
if (javaMethod.isDefault() == false && Modifier.isStatic(javaMethod.getModifiers()) == false) {
try {
Object.class.getMethod(javaMethod.getName(), javaMethod.getParameterTypes());
} catch (ReflectiveOperationException roe) {
javaMethods.add(javaMethod);
}
}
}
// copies methods and fields from super structs to the parent struct
copyStruct(painlessStruct.name, painlessSuperStructs);
// copies methods and fields from Object into interface types
if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
PainlessClassBuilder painlessObjectStruct = classesToPainlessClassBuilders.get(Object.class);
if (painlessObjectStruct != null) {
copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
}
if (javaMethods.size() != 1 && targetClass.isAnnotationPresent(FunctionalInterface.class)) {
throw new IllegalArgumentException("class [" + typeToCanonicalTypeName(targetClass) + "] " +
"is illegally marked as a FunctionalInterface with java methods " + javaMethods);
} else if (javaMethods.size() == 1) {
java.lang.reflect.Method javaMethod = javaMethods.get(0);
String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount());
painlessClassBuilder.functionalMethod = painlessClassBuilder.methods.get(painlessMethodKey);
}
}
// precompute runtime classes
for (PainlessClassBuilder painlessStruct : classesToPainlessClassBuilders.values()) {
addRuntimeClass(painlessStruct);
}
Map<Class<?>, PainlessClass> javaClassesToPainlessClasses = new HashMap<>();
// copy all structs to make them unmodifiable for outside users:
for (Map.Entry<Class<?>,PainlessClassBuilder> entry : classesToPainlessClassBuilders.entrySet()) {
entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue());
javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build());
}
return new PainlessLookup(canonicalClassNamesToClasses, javaClassesToPainlessClasses);
}
}

View File

@ -181,27 +181,32 @@ public class Netty4HttpServerPipeliningTests extends ESTestCase {
@Override
public void run() {
final String uri = fullHttpRequest.uri();
try {
final String uri = fullHttpRequest.uri();
final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8);
final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8);
Netty4HttpRequest httpRequest = new Netty4HttpRequest(fullHttpRequest, pipelinedRequest.getSequence());
Netty4HttpResponse response = httpRequest.createResponse(RestStatus.OK, new BytesArray(uri.getBytes(StandardCharsets.UTF_8)));
response.headers().add(HttpHeaderNames.CONTENT_LENGTH, buffer.readableBytes());
Netty4HttpRequest httpRequest = new Netty4HttpRequest(fullHttpRequest, pipelinedRequest.getSequence());
Netty4HttpResponse response =
httpRequest.createResponse(RestStatus.OK, new BytesArray(uri.getBytes(StandardCharsets.UTF_8)));
response.headers().add(HttpHeaderNames.CONTENT_LENGTH, buffer.readableBytes());
final boolean slow = uri.matches("/slow/\\d+");
if (slow) {
try {
Thread.sleep(scaledRandomIntBetween(500, 1000));
} catch (InterruptedException e) {
throw new RuntimeException(e);
final boolean slow = uri.matches("/slow/\\d+");
if (slow) {
try {
Thread.sleep(scaledRandomIntBetween(500, 1000));
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
} else {
assert uri.matches("/\\d+");
}
} else {
assert uri.matches("/\\d+");
}
final ChannelPromise promise = ctx.newPromise();
ctx.writeAndFlush(response, promise);
final ChannelPromise promise = ctx.newPromise();
ctx.writeAndFlush(response, promise);
} finally {
fullHttpRequest.release();
}
}
}

View File

@ -174,6 +174,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_6_3_1 = new Version(V_6_3_1_ID, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final int V_6_3_2_ID = 6030299;
public static final Version V_6_3_2 = new Version(V_6_3_2_ID, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final int V_6_3_3_ID = 6030399;
public static final Version V_6_3_3 = new Version(V_6_3_3_ID, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final int V_6_4_0_ID = 6040099;
public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final int V_7_0_0_alpha1_ID = 7000001;
@ -196,6 +198,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
return V_7_0_0_alpha1;
case V_6_4_0_ID:
return V_6_4_0;
case V_6_3_3_ID:
return V_6_3_3;
case V_6_3_2_ID:
return V_6_3_2;
case V_6_3_1_ID:

View File

@ -166,6 +166,13 @@ public class KeyStoreWrapper implements SecureSettings {
this.dataBytes = dataBytes;
}
/**
 * Returns the on-disk metadata format version of this keystore, as read from the
 * file header when the keystore was loaded.
 *
 * @return the keystore format version number
 */
public int getFormatVersion() {
    return formatVersion;
}
/** Returns a path representing the ES keystore in the given config dir. */
public static Path keystorePath(Path configDir) {
return configDir.resolve(KEYSTORE_FILENAME);
@ -593,8 +600,10 @@ public class KeyStoreWrapper implements SecureSettings {
@Override
public synchronized void close() {
this.closed = true;
for (Entry entry : entries.get().values()) {
Arrays.fill(entry.bytes, (byte)0);
if (null != entries.get() && entries.get().isEmpty() == false) {
for (Entry entry : entries.get().values()) {
Arrays.fill(entry.bytes, (byte) 0);
}
}
}
}

View File

@ -61,8 +61,8 @@ import static org.hamcrest.Matchers.notNullValue;
public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
@Override
protected MatchQueryBuilder doCreateTestQueryBuilder() {
String fieldName = STRING_ALIAS_FIELD_NAME; //randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME,
//INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME,
INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
if (fieldName.equals(DATE_FIELD_NAME)) {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
}

View File

@ -9,6 +9,8 @@ import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import java.util.ArrayList;
import java.util.Arrays;
@ -25,9 +27,15 @@ import static org.elasticsearch.common.Strings.collectionToDelimitedString;
public final class Automatons {
public static final Setting<Integer> MAX_DETERMINIZED_STATES_SETTING =
Setting.intSetting("xpack.security.automata.max_determinized_states", 100000, DEFAULT_MAX_DETERMINIZED_STATES,
Setting.Property.NodeScope);
public static final Automaton EMPTY = Automata.makeEmpty();
public static final Automaton MATCH_ALL = Automata.makeAnyString();
// this value is not final since we allow it to be set at runtime
private static int maxDeterminizedStates = 100000;
static final char WILDCARD_STRING = '*'; // String equality with support for wildcards
static final char WILDCARD_CHAR = '?'; // Char equality with support for wildcards
static final char WILDCARD_ESCAPE = '\\'; // Escape character
@ -49,13 +57,12 @@ public final class Automatons {
if (patterns.isEmpty()) {
return EMPTY;
}
Automaton automaton = null;
List<Automaton> automata = new ArrayList<>(patterns.size());
for (String pattern : patterns) {
final Automaton patternAutomaton = minimize(pattern(pattern), DEFAULT_MAX_DETERMINIZED_STATES);
automaton = automaton == null ? patternAutomaton : unionAndMinimize(Arrays.asList(automaton, patternAutomaton));
final Automaton patternAutomaton = pattern(pattern);
automata.add(patternAutomaton);
}
// the automaton is always minimized and deterministic
return automaton;
return unionAndMinimize(automata);
}
/**
@ -111,12 +118,12 @@ public final class Automatons {
public static Automaton unionAndMinimize(Collection<Automaton> automata) {
Automaton res = union(automata);
return minimize(res, DEFAULT_MAX_DETERMINIZED_STATES);
return minimize(res, maxDeterminizedStates);
}
public static Automaton minusAndMinimize(Automaton a1, Automaton a2) {
Automaton res = minus(a1, a2, DEFAULT_MAX_DETERMINIZED_STATES);
return minimize(res, DEFAULT_MAX_DETERMINIZED_STATES);
Automaton res = minus(a1, a2, maxDeterminizedStates);
return minimize(res, maxDeterminizedStates);
}
public static Predicate<String> predicate(String... patterns) {
@ -131,8 +138,17 @@ public final class Automatons {
return predicate(automaton, "Predicate for " + automaton);
}
public static void updateMaxDeterminizedStates(Settings settings) {
maxDeterminizedStates = MAX_DETERMINIZED_STATES_SETTING.get(settings);
}
// accessor for testing
static int getMaxDeterminizedStates() {
return maxDeterminizedStates;
}
private static Predicate<String> predicate(Automaton automaton, final String toString) {
CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_MAX_DETERMINIZED_STATES);
CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates);
return new Predicate<String>() {
@Override
public boolean test(String s) {

View File

@ -8,8 +8,11 @@ package org.elasticsearch.xpack.core.security.support;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -113,4 +116,39 @@ public class AutomatonsTests extends ESTestCase {
// expected
}
}
/**
 * Builds a union automaton from a large batch of random index names and verifies
 * that the result is deterministic and still accepts every one of the input names.
 */
public void testLotsOfIndices() {
    final int count = scaledRandomIntBetween(512, 1024);
    final List<String> indexNames = new ArrayList<>(count);
    for (int i = 0; i < count; ++i) {
        indexNames.add(randomAlphaOfLengthBetween(6, 48));
    }
    final Automaton union = Automatons.patterns(indexNames);
    assertTrue(union.isDeterministic());
    final CharacterRunAutomaton matcher = new CharacterRunAutomaton(union);
    for (final String indexName : indexNames) {
        assertTrue(matcher.run(indexName));
    }
}
/**
 * Verifies that {@code Automatons.updateMaxDeterminizedStates} takes effect: after
 * lowering the limit to the setting's minimum (10000), building a sufficiently large
 * union of patterns throws {@link TooComplexToDeterminizeException} carrying that limit.
 * The limit is static/global state, so the finally block restores the default (100000)
 * for subsequent tests regardless of outcome.
 */
public void testSettingMaxDeterminizedStates() {
    try {
        // sanity: the limit must not already be at the value we are about to set
        assertNotEquals(10000, Automatons.getMaxDeterminizedStates());
        // set to the min value
        Settings settings = Settings.builder().put(Automatons.MAX_DETERMINIZED_STATES_SETTING.getKey(), 10000).build();
        Automatons.updateMaxDeterminizedStates(settings);
        assertEquals(10000, Automatons.getMaxDeterminizedStates());

        // a union of 1024 long random strings needs more than 10000 determinized states
        final List<String> names = new ArrayList<>(1024);
        for (int i = 0; i < 1024; i++) {
            names.add(randomAlphaOfLength(48));
        }
        TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class, () -> Automatons.patterns(names));
        assertThat(e.getMaxDeterminizedStates(), equalTo(10000));
    } finally {
        // restore the default limit so other tests are unaffected
        Automatons.updateMaxDeterminizedStates(Settings.EMPTY);
        assertEquals(100000, Automatons.getMaxDeterminizedStates());
    }
}
}

View File

@ -0,0 +1,54 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.Settings;
/**
 * Bootstrap check that rejects JKS keystores when the node is configured to run in a
 * FIPS 140 compliant JVM, where the JKS keystore type is not available. A keystore is
 * considered JKS either when a {@code *.keystore.type} setting explicitly says so, or
 * when a {@code *.keystore.path} is configured without any explicit type (JKS being
 * the default type in that case).
 */
public class FIPS140JKSKeystoreBootstrapCheck implements BootstrapCheck {

    private final boolean fipsModeEnabled;

    FIPS140JKSKeystoreBootstrapCheck(Settings settings) {
        this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings);
    }

    /**
     * Test if the node fails the check.
     *
     * @param context the bootstrap context
     * @return a failure naming the offending settings, or success
     */
    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        if (fipsModeEnabled == false) {
            return BootstrapCheckResult.success();
        }
        final Settings settings = context.settings;
        // settings that explicitly declare a JKS keystore type
        final Settings explicitJksTypes = settings.filter(k -> k.endsWith("keystore.type"))
            .filter(k -> settings.get(k).equalsIgnoreCase("jks"));
        if (explicitJksTypes.isEmpty() == false) {
            return BootstrapCheckResult.failure("JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please " +
                "revisit [" + explicitJksTypes.toDelimitedString(',') + "] settings");
        }
        // a keystore path with no matching explicit type defaults to JKS
        final Settings implicitJksPaths = settings.filter(k -> k.endsWith("keystore.path"))
            .filter(k -> settings.hasValue(k.replace(".path", ".type")) == false);
        if (implicitJksPaths.isEmpty() == false) {
            return BootstrapCheckResult.failure("JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please " +
                "revisit [" + implicitJksPaths.toDelimitedString(',') + "] settings");
        }
        return BootstrapCheckResult.success();
    }

    @Override
    public boolean alwaysEnforce() {
        return fipsModeEnabled;
    }
}

View File

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.XPackSettings;
import java.util.Locale;
/**
 * Bootstrap check that requires a PBKDF2-family password hashing algorithm
 * ({@code xpack.security.authc.password_hashing.algorithm}) when the node is meant to
 * run on a FIPS 140 compliant JVM, where BCrypt-based hashing is not permitted.
 */
public class FIPS140PasswordHashingAlgorithmBootstrapCheck implements BootstrapCheck {

    // captured at construction; used to force enforcement of the check result
    private final boolean fipsModeEnabled;

    FIPS140PasswordHashingAlgorithmBootstrapCheck(Settings settings) {
        this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings);
    }

    /**
     * Test if the node fails the check.
     *
     * @param context the bootstrap context
     * @return the result of the bootstrap check
     */
    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        // NOTE(review): unlike FIPS140JKSKeystoreBootstrapCheck and
        // FIPS140SecureSettingsBootstrapCheck, this check does not gate on fipsModeEnabled,
        // so it reports failure for non-PBKDF2 algorithms even when FIPS mode is disabled
        // (the sibling unit test currently relies on this) — confirm this is intended.
        final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(context.settings);
        // case-insensitive prefix match accepts PBKDF2 variants such as PBKDF2_10000
        if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) {
            return BootstrapCheckResult.failure("Only PBKDF2 is allowed for password hashing in a FIPS-140 JVM. Please set the " +
                "appropriate value for [ " + XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey() + " ] setting.");
        }
        return BootstrapCheckResult.success();
    }

    @Override
    public boolean alwaysEnforce() {
        return fipsModeEnabled;
    }
}

View File

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import java.io.IOException;
import java.io.UncheckedIOException;
public class FIPS140SecureSettingsBootstrapCheck implements BootstrapCheck {
private final boolean fipsModeEnabled;
private final Environment environment;
FIPS140SecureSettingsBootstrapCheck(Settings settings, Environment environment) {
this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings);
this.environment = environment;
}
/**
* Test if the node fails the check.
*
* @param context the bootstrap context
* @return the result of the bootstrap check
*/
@Override
public BootstrapCheckResult check(BootstrapContext context) {
if (fipsModeEnabled) {
try (KeyStoreWrapper secureSettings = KeyStoreWrapper.load(environment.configFile())) {
if (secureSettings != null && secureSettings.getFormatVersion() < 3) {
return BootstrapCheckResult.failure("Secure settings store is not of the latest version. Please use " +
"bin/elasticsearch-keystore create to generate a new secure settings store and migrate the secure settings there.");
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
return BootstrapCheckResult.success();
}
@Override
public boolean alwaysEnforce() {
return fipsModeEnabled;
}
}

View File

@ -126,6 +126,7 @@ import org.elasticsearch.xpack.security.action.privilege.TransportGetPrivilegesA
import org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction;
import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
import org.elasticsearch.xpack.core.security.index.IndexAuditTrailField;
import org.elasticsearch.xpack.core.security.support.Automatons;
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings;
@ -217,7 +218,6 @@ import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterce
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServerTransport;
import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport;
import org.elasticsearch.xpack.core.template.TemplateUtils;
import org.elasticsearch.xpack.security.transport.nio.SecurityNioHttpServerTransport;
import org.elasticsearch.xpack.security.transport.nio.SecurityNioTransport;
import org.joda.time.DateTime;
@ -256,6 +256,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
DiscoveryPlugin, MapperPlugin, ExtensiblePlugin {
private static final Logger logger = Loggers.getLogger(Security.class);
static final Setting<Boolean> FIPS_MODE_ENABLED =
Setting.boolSetting("xpack.security.fips_mode.enabled", false, Property.NodeScope);
static final Setting<List<String>> AUDIT_OUTPUTS_SETTING =
Setting.listSetting(SecurityField.setting("audit.outputs"),
@ -294,9 +296,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
this.enabled = XPackSettings.SECURITY_ENABLED.get(settings);
if (enabled && transportClientMode == false) {
validateAutoCreateIndex(settings);
}
if (enabled) {
// we load them all here otherwise we can't access secure settings since they are closed once the checks are
// fetched
final List<BootstrapCheck> checks = new ArrayList<>();
@ -305,9 +304,13 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
new PkiRealmBootstrapCheck(getSslService()),
new TLSLicenseBootstrapCheck(),
new PasswordHashingAlgorithmBootstrapCheck(),
new FIPS140SecureSettingsBootstrapCheck(settings, env),
new FIPS140JKSKeystoreBootstrapCheck(settings),
new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings),
new KerberosRealmBootstrapCheck(env)));
checks.addAll(InternalRealms.getBootstrapChecks(settings, env));
this.bootstrapChecks = Collections.unmodifiableList(checks);
Automatons.updateMaxDeterminizedStates(settings);
} else {
this.bootstrapChecks = Collections.emptyList();
}
@ -592,6 +595,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
}
// The following just apply in node mode
settingsList.add(FIPS_MODE_ENABLED);
// IP Filter settings
IPFilter.addSettings(settingsList);
@ -601,13 +605,14 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
LoggingAuditTrail.registerSettings(settingsList);
IndexAuditTrail.registerSettings(settingsList);
// authentication settings
// authentication and authorization settings
AnonymousUser.addSettings(settingsList);
RealmSettings.addSettings(settingsList, securityExtensions);
NativeRolesStore.addSettings(settingsList);
ReservedRealm.addSettings(settingsList);
AuthenticationService.addSettings(settingsList);
AuthorizationService.addSettings(settingsList);
settingsList.add(Automatons.MAX_DETERMINIZED_STATES_SETTING);
settingsList.add(CompositeRolesStore.CACHE_SIZE_SETTING);
settingsList.add(FieldPermissionsCache.CACHE_SIZE_SETTING);
settingsList.add(TokenService.TOKEN_EXPIRATION);

View File

@ -0,0 +1,66 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
/**
 * Tests for {@link FIPS140JKSKeystoreBootstrapCheck}: the check must fail whenever a
 * keystore is JKS — either explicitly via a {@code *.keystore.type} setting or
 * implicitly via a {@code *.keystore.path} with no declared type — and pass when no
 * keystore is configured at all.
 */
public class FIPS140JKSKeystoreBootstrapCheckTests extends ESTestCase {

    public void testNoKeystoreIsAllowed() {
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true");
        assertFalse(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }

    public void testSSLKeystoreTypeIsNotAllowed() {
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true")
            .put("xpack.ssl.keystore.path", "/this/is/the/path")
            .put("xpack.ssl.keystore.type", "JKS");
        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }

    public void testSSLImplicitKeystoreTypeIsNotAllowed() {
        // no explicit keystore.type here: a bare keystore.path defaults to JKS,
        // which is what this test is meant to exercise
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true")
            .put("xpack.ssl.keystore.path", "/this/is/the/path");
        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }

    public void testTransportSSLKeystoreTypeIsNotAllowed() {
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true")
            .put("xpack.security.transport.ssl.keystore.path", "/this/is/the/path")
            .put("xpack.security.transport.ssl.keystore.type", "JKS");
        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }

    public void testHttpSSLKeystoreTypeIsNotAllowed() {
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true")
            .put("xpack.security.http.ssl.keystore.path", "/this/is/the/path")
            .put("xpack.security.http.ssl.keystore.type", "JKS");
        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }

    public void testRealmKeystoreTypeIsNotAllowed() {
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true")
            .put("xpack.security.authc.realms.ldap.ssl.keystore.path", "/this/is/the/path")
            .put("xpack.security.authc.realms.ldap.ssl.keystore.type", "JKS");
        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }

    public void testImplicitRealmKeystoreTypeIsNotAllowed() {
        final Settings.Builder settings = Settings.builder()
            .put("xpack.security.fips_mode.enabled", "true")
            .put("xpack.security.authc.realms.ldap.ssl.keystore.path", "/this/is/the/path");
        assertTrue(new FIPS140JKSKeystoreBootstrapCheck(settings.build()).check(new BootstrapContext(settings.build(), null)).isFailure());
    }
}

View File

@ -0,0 +1,34 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.XPackSettings;
public class FIPS140PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCase {
public void testPBKDF2AlgorithmIsAllowed() {
Settings settings = Settings.builder().put("xpack.security.fips_mode.enabled", "true").build();
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000").build();
assertFalse(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2").build();
assertFalse(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
}
public void testBCRYPTAlgorithmIsNotAllowed() {
Settings settings = Settings.builder().put("xpack.security.fips_mode.enabled", "true").build();
assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT").build();
assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT11").build();
assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
}
}

View File

@ -0,0 +1,102 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import java.io.ByteArrayOutputStream;
import java.nio.file.Path;
import java.security.AccessControlException;
import java.security.KeyStore;
import java.util.Base64;
/**
 * Tests for {@link FIPS140SecureSettingsBootstrapCheck}: a hand-built v2-format
 * elasticsearch.keystore must fail the check in FIPS mode, while a freshly created
 * keystore (current format version) must pass.
 */
public class FIPS140SecureSettingsBootstrapCheckTests extends ESTestCase {

    public void testLegacySecureSettingsIsNotAllowed() throws Exception {
        // the v2 generator below relies on the PBE algorithm, which FIPS JVMs do not provide
        assumeFalse("Can't run in a FIPS JVM, PBE is not available", inFipsJvm());
        final Settings.Builder builder = Settings.builder()
            .put("path.home", createTempDir())
            .put("xpack.security.fips_mode.enabled", "true");
        Environment env = TestEnvironment.newEnvironment(builder.build());
        generateV2Keystore(env);
        assertTrue(new FIPS140SecureSettingsBootstrapCheck(builder.build(), env).check(new BootstrapContext(builder.build(),
            null)).isFailure());
    }

    public void testCorrectSecureSettingsVersionIsAllowed() throws Exception {
        final Settings.Builder builder = Settings.builder()
            .put("path.home", createTempDir())
            .put("xpack.security.fips_mode.enabled", "true");
        Environment env = TestEnvironment.newEnvironment(builder.build());
        // KeyStoreWrapper.create() produces a keystore in the current (>= 3) format
        final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create();
        try {
            keyStoreWrapper.save(env.configFile(), "password".toCharArray());
        } catch (final AccessControlException e) {
            if (e.getPermission() instanceof RuntimePermission && e.getPermission().getName().equals("accessUserInformation")) {
                // this is expected under the test security manager, but we don't care in tests
            } else {
                throw e;
            }
        }
        assertFalse(new FIPS140SecureSettingsBootstrapCheck(builder.build(), env).check(new BootstrapContext(builder.build(),
            null)).isFailure());
    }

    /**
     * Writes an elasticsearch.keystore file in the legacy v2 on-disk format (Lucene codec
     * header at version 2, followed by setting-type metadata and a PBE-protected PKCS12
     * keystore blob) into the given environment's config directory.
     */
    private void generateV2Keystore(Environment env) throws Exception {
        Path configDir = env.configFile();
        SimpleFSDirectory directory = new SimpleFSDirectory(configDir);
        byte[] fileBytes = new byte[20];
        random().nextBytes(fileBytes);
        try (IndexOutput output = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) {
            CodecUtil.writeHeader(output, "elasticsearch.keystore", 2);
            output.writeByte((byte) 0); // hasPassword = false
            output.writeString("PKCS12");
            output.writeString("PBE"); // string algo
            output.writeString("PBE"); // file algo
            output.writeVInt(2); // num settings
            output.writeString("string_setting");
            output.writeString("STRING");
            output.writeString("file_setting");
            output.writeString("FILE");
            SecretKeyFactory secretFactory = SecretKeyFactory.getInstance("PBE");
            KeyStore keystore = KeyStore.getInstance("PKCS12");
            keystore.load(null, null);
            // the string setting is stored as a PBE secret key derived from its chars
            SecretKey secretKey = secretFactory.generateSecret(new PBEKeySpec("stringSecretValue".toCharArray()));
            KeyStore.ProtectionParameter protectionParameter = new KeyStore.PasswordProtection(new char[0]);
            keystore.setEntry("string_setting", new KeyStore.SecretKeyEntry(secretKey), protectionParameter);
            // file settings are base64-encoded before being wrapped in a PBE key
            byte[] base64Bytes = Base64.getEncoder().encode(fileBytes);
            char[] chars = new char[base64Bytes.length];
            for (int i = 0; i < chars.length; ++i) {
                chars[i] = (char) base64Bytes[i]; // PBE only stores the lower 8 bits, so this narrowing is ok
            }
            secretKey = secretFactory.generateSecret(new PBEKeySpec(chars));
            keystore.setEntry("file_setting", new KeyStore.SecretKeyEntry(secretKey), protectionParameter);
            ByteArrayOutputStream keystoreBytesStream = new ByteArrayOutputStream();
            keystore.store(keystoreBytesStream, new char[0]);
            byte[] keystoreBytes = keystoreBytesStream.toByteArray();
            output.writeInt(keystoreBytes.length);
            output.writeBytes(keystoreBytes, keystoreBytes.length);
            CodecUtil.writeFooter(output);
        }
    }
}

View File

@ -93,7 +93,7 @@ public class SSLReloadIntegTests extends SecurityIntegTestCase {
}
public void testThatSSLConfigurationReloadsOnModification() throws Exception {
assumeFalse("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0);
assumeTrue("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0);
Path keyPath = createTempDir().resolve("testnode_updated.pem");
Path certPath = createTempDir().resolve("testnode_updated.crt");
Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath);

View File

@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
dependencies {
testCompile project(path: xpackModule('core'), configuration: 'runtime')
testCompile project(path: xpackModule('core'), configuration: 'shadow')
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
}