Merge remote-tracking branch 'es/master' into ccr

* es/master: (21 commits)
  Tweaked Elasticsearch Service links for SEO
  Watcher: Store username on watch execution (#31873)
  Use correct formatting for links (#29460)
  Painless: Separate PainlessLookup into PainlessLookup and PainlessLookupBuilder (#32054)
  Scripting: Remove dead code from painless module (#32064)
  [Rollup] Replace RollupIT with a ESRestTestCase version (#31977)
  [TEST] Consistent algorithm usage (#32077)
  [Rollup] Fix duplicate field names in test (#32075)
  Ensure only parent breaker trips in unit test
  Unmute field collapsing rest tests
  Fix BWC check after backport
  [Tests] Fix failure due to changes exception message (#32036)
  Remove unused params from SSource and Walker (#31935)
  [Test] Mute MlJobIT#testDeleteJobAfterMissingAliases
  Turn off real-mem breaker in REST tests
  Turn off real-mem breaker in single node tests
  Fix broken OpenLDAP Vagrant QA test
  Cleanup Duplication in `PainlessScriptEngine` (#31991)
  SCRIPTING: Remove unused MultiSearchTemplateRequestBuilder (#32049)
  Fix compile issues introduced by merge (#32058)
  ...
This commit is contained in:
Martijn van Groningen 2018-07-16 21:49:57 +02:00
commit f5e2168260
No known key found for this signature in database
GPG Key ID: AB236F4FCF2AF12A
83 changed files with 2150 additions and 1673 deletions

View File

@ -331,6 +331,12 @@ class ClusterFormationTasks {
}
// increase script compilation limit since tests can rapid-fire script compilations
esConfig['script.max_compilations_rate'] = '2048/1m'
// Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
// over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
// can retry on circuit breaking exceptions, we can revert again to the default configuration.
if (node.nodeVersion.major >= 7) {
esConfig['indices.breaker.total.use_real_memory'] = false
}
esConfig.putAll(node.config.settings)
Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)

View File

@ -295,7 +295,6 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
}
@SuppressWarnings({ "unused" })
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32029")
public void testSearchRequestAggregations() throws IOException {
RestHighLevelClient client = highLevelClient();
{
@ -338,8 +337,9 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
Range range = aggregations.get("by_company"); // <1>
// end::search-request-aggregations-get-wrongCast
} catch (ClassCastException ex) {
assertEquals("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms"
+ " cannot be cast to org.elasticsearch.search.aggregations.bucket.range.Range", ex.getMessage());
String message = ex.getMessage();
assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms"));
assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.range.Range"));
}
assertEquals(3, elasticBucket.getDocCount());
assertEquals(30, avg, 0.0);

View File

@ -3,8 +3,8 @@
This section includes a few recipes to help with common problems:
* mixing-exact-search-with-stemming
* consistent-scoring
* <<mixing-exact-search-with-stemming>>
* <<consistent-scoring>>
include::recipes/stemming.asciidoc[]
include::recipes/scoring.asciidoc[]

View File

@ -3,10 +3,12 @@
[float]
=== Hosted Elasticsearch
Elasticsearch can be run on your own hardware or using our hosted
Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
available on AWS and GCP. You can
https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
You can run Elasticsearch on your own hardware, or use our
https://www.elastic.co/cloud/elasticsearch-service[hosted Elasticsearch Service]
on Elastic Cloud. The Elasticsearch Service is available on both AWS and GCP.
https://www.elastic.co/cloud/elasticsearch-service/signup[Try out the
Elasticsearch Service for free].
[float]
=== Installing Elasticsearch Yourself

View File

@ -19,8 +19,6 @@
package org.elasticsearch.ingest.common;
import com.fasterxml.jackson.core.JsonFactory;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -48,7 +46,6 @@ import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationExcept
public final class ScriptProcessor extends AbstractProcessor {
public static final String TYPE = "script";
private static final JsonFactory JSON_FACTORY = new JsonFactory();
private final Script script;
private final ScriptService scriptService;

View File

@ -1,65 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script.mustache;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.ElasticsearchClient;
/**
 * A request builder for assembling a {@link MultiSearchTemplateRequest} out of
 * individual search template requests.
 */
public class MultiSearchTemplateRequestBuilder
        extends ActionRequestBuilder<MultiSearchTemplateRequest, MultiSearchTemplateResponse> {

    protected MultiSearchTemplateRequestBuilder(ElasticsearchClient client, MultiSearchTemplateAction action) {
        super(client, action, new MultiSearchTemplateRequest());
    }

    /**
     * Adds a search template request to this multi search.
     *
     * If the added request still carries the default indices options while this
     * builder has custom ones, the builder's options are propagated to it.
     */
    public MultiSearchTemplateRequestBuilder add(SearchTemplateRequest request) {
        inheritIndicesOptionsIfUnset(request);
        super.request.add(request);
        return this;
    }

    /**
     * Adds a search template request (built via a builder) to this multi search,
     * with the same indices-options inheritance as {@link #add(SearchTemplateRequest)}.
     */
    public MultiSearchTemplateRequestBuilder add(SearchTemplateRequestBuilder request) {
        return add(request.request());
    }

    /** Sets the indices options that newly added requests inherit by default. */
    public MultiSearchTemplateRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
        request().indicesOptions(indicesOptions);
        return this;
    }

    /**
     * Sets how many search requests specified in this multi search requests are allowed to be ran concurrently.
     */
    public MultiSearchTemplateRequestBuilder setMaxConcurrentSearchRequests(int maxConcurrentSearchRequests) {
        request().maxConcurrentSearchRequests(maxConcurrentSearchRequests);
        return this;
    }

    // Copies this builder's custom indices options onto the added request, but only when
    // the added request is still on the shared default options instance (identity check
    // is intentional: the default is a singleton).
    private void inheritIndicesOptionsIfUnset(SearchTemplateRequest searchTemplateRequest) {
        boolean addedUsesDefaults =
                searchTemplateRequest.getRequest().indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed();
        boolean builderHasCustomOptions =
                request().indicesOptions() != IndicesOptions.strictExpandOpenAndForbidClosed();
        if (addedUsesDefaults && builderHasCustomOptions) {
            searchTemplateRequest.getRequest().indicesOptions(request().indicesOptions());
        }
    }
}

View File

@ -21,7 +21,7 @@ package org.elasticsearch.painless;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import java.util.Objects;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
@ -227,14 +227,6 @@ public final class MethodWriter extends GeneratorAdapter {
return Type.getType(clazz);
}
/**
 * Writes a conditional branch based on the boolean on top of the stack.
 * Jumps to {@code tru} (IFNE) when it is set, otherwise to {@code fals} (IFEQ)
 * when that is set; emits nothing when both targets are null.
 */
public void writeBranch(final Label tru, final Label fals) {
    if (tru != null) {
        visitJumpInsn(Opcodes.IFNE, tru);
        return;
    }
    if (fals != null) {
        visitJumpInsn(Opcodes.IFEQ, fals);
    }
}
/** Starts a new string concat.
* @return the size of arguments pushed to stack (the object that does string concats, e.g. a StringBuilder)
*/

View File

@ -24,7 +24,7 @@ import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.painless.Compiler.Loader;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
@ -102,9 +102,11 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
for (Map.Entry<ScriptContext<?>, List<Whitelist>> entry : contexts.entrySet()) {
ScriptContext<?> context = entry.getKey();
if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) {
contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, new PainlessLookup(entry.getValue())));
contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class,
new PainlessLookupBuilder(entry.getValue()).build()));
} else {
contextsToCompilers.put(context, new Compiler(context.instanceClazz, new PainlessLookup(entry.getValue())));
contextsToCompilers.put(context, new Compiler(context.instanceClazz,
new PainlessLookupBuilder(entry.getValue()).build()));
}
}
@ -366,44 +368,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
}
Object compile(Compiler compiler, String scriptName, String source, Map<String, String> params, Object... args) {
final CompilerSettings compilerSettings;
if (params.isEmpty()) {
// Use the default settings.
compilerSettings = defaultCompilerSettings;
} else {
// Use custom settings specified by params.
compilerSettings = new CompilerSettings();
// Except regexes enabled - this is a node level setting and can't be changed in the request.
compilerSettings.setRegexesEnabled(defaultCompilerSettings.areRegexesEnabled());
Map<String, String> copy = new HashMap<>(params);
String value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER);
if (value != null) {
compilerSettings.setMaxLoopCounter(Integer.parseInt(value));
}
value = copy.remove(CompilerSettings.PICKY);
if (value != null) {
compilerSettings.setPicky(Boolean.parseBoolean(value));
}
value = copy.remove(CompilerSettings.INITIAL_CALL_SITE_DEPTH);
if (value != null) {
compilerSettings.setInitialCallSiteDepth(Integer.parseInt(value));
}
value = copy.remove(CompilerSettings.REGEX_ENABLED.getKey());
if (value != null) {
throw new IllegalArgumentException("[painless.regex.enabled] can only be set on node startup.");
}
if (!copy.isEmpty()) {
throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
}
}
final CompilerSettings compilerSettings = buildCompilerSettings(params);
// Check we ourselves are not being called by unprivileged code.
SpecialPermission.check();
@ -434,14 +399,33 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
}, COMPILATION_CONTEXT);
// Note that it is safe to catch any of the following errors since Painless is stateless.
} catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
throw convertToScriptException(scriptName == null ? source : scriptName, source, e);
throw convertToScriptException(source, e);
}
}
void compile(Compiler compiler, Loader loader, MainMethodReserved reserved,
String scriptName, String source, Map<String, String> params) {
final CompilerSettings compilerSettings;
final CompilerSettings compilerSettings = buildCompilerSettings(params);
try {
// Drop all permissions to actually compile the code itself.
AccessController.doPrivileged(new PrivilegedAction<Void>() {
@Override
public Void run() {
String name = scriptName == null ? source : scriptName;
compiler.compile(loader, reserved, name, source, compilerSettings);
return null;
}
}, COMPILATION_CONTEXT);
// Note that it is safe to catch any of the following errors since Painless is stateless.
} catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
throw convertToScriptException(source, e);
}
}
private CompilerSettings buildCompilerSettings(Map<String, String> params) {
CompilerSettings compilerSettings;
if (params.isEmpty()) {
// Use the default settings.
compilerSettings = defaultCompilerSettings;
@ -478,25 +462,10 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
}
}
try {
// Drop all permissions to actually compile the code itself.
AccessController.doPrivileged(new PrivilegedAction<Void>() {
@Override
public Void run() {
String name = scriptName == null ? source : scriptName;
compiler.compile(loader, reserved, name, source, compilerSettings);
return null;
}
}, COMPILATION_CONTEXT);
// Note that it is safe to catch any of the following errors since Painless is stateless.
} catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
throw convertToScriptException(scriptName == null ? source : scriptName, source, e);
}
return compilerSettings;
}
private ScriptException convertToScriptException(String scriptName, String scriptSource, Throwable t) {
private ScriptException convertToScriptException(String scriptSource, Throwable t) {
// create a script stack: this is just the script portion
List<String> scriptStack = new ArrayList<>();
for (StackTraceElement element : t.getStackTrace()) {
@ -507,7 +476,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
scriptStack.add("<<< unknown portion of script >>>");
} else {
offset--; // offset is 1 based, line numbers must be!
int startOffset = getPreviousStatement(scriptSource, offset);
int startOffset = getPreviousStatement(offset);
int endOffset = getNextStatement(scriptSource, offset);
StringBuilder snippet = new StringBuilder();
if (startOffset > 0) {
@ -535,7 +504,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
}
// very simple heuristic: +/- 25 chars. can be improved later.
private int getPreviousStatement(String scriptSource, int offset) {
private int getPreviousStatement(int offset) {
return Math.max(0, offset - 25);
}

View File

@ -1,62 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenFactory;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.misc.Pair;
/**
* Token factory that preserves that last non-whitespace token so you can do token level lookbehind in the lexer.
*/
/**
 * Token factory that preserves that last non-whitespace token so you can do token level lookbehind in the lexer.
 */
public class StashingTokenFactory<T extends Token> implements TokenFactory<T> {
    private final TokenFactory<T> delegate;

    // Most recently created default-channel token; null until one has been seen.
    private T stashed;

    public StashingTokenFactory(TokenFactory<T> delegate) {
        this.delegate = delegate;
    }

    /** Returns the last default-channel token created, or null if none yet. */
    public T getLastToken() {
        return stashed;
    }

    @Override
    public T create(Pair<TokenSource, CharStream> source, int type, String text, int channel, int start, int stop, int line,
            int charPositionInLine) {
        return stash(delegate.create(source, type, text, channel, start, stop, line, charPositionInLine));
    }

    @Override
    public T create(int type, String text) {
        return stash(delegate.create(type, text));
    }

    // Remember the token for lookbehind, but only if it is a real (default-channel)
    // token; tokens on other channels (whitespace/comments) pass through untouched.
    private T stash(T token) {
        if (token.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL) {
            stashed = token;
        }
        return token;
    }
}

View File

@ -29,7 +29,6 @@ import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.Operation;
@ -107,6 +106,7 @@ import org.elasticsearch.painless.antlr.PainlessParser.TrueContext;
import org.elasticsearch.painless.antlr.PainlessParser.TryContext;
import org.elasticsearch.painless.antlr.PainlessParser.VariableContext;
import org.elasticsearch.painless.antlr.PainlessParser.WhileContext;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.node.AExpression;
import org.elasticsearch.painless.node.ANode;
import org.elasticsearch.painless.node.AStatement;
@ -184,7 +184,6 @@ public final class Walker extends PainlessParserBaseVisitor<ANode> {
private final CompilerSettings settings;
private final Printer debugStream;
private final String sourceName;
private final String sourceText;
private final PainlessLookup painlessLookup;
private final Deque<Reserved> reserved = new ArrayDeque<>();
@ -198,7 +197,6 @@ public final class Walker extends PainlessParserBaseVisitor<ANode> {
this.debugStream = debugStream;
this.settings = settings;
this.sourceName = Location.computeSourceName(sourceName);
this.sourceText = sourceText;
this.globals = new Globals(new BitSet(sourceText.length()));
this.painlessLookup = painlessLookup;
this.source = (SSource)visit(buildAntlrTree(sourceText));
@ -267,7 +265,7 @@ public final class Walker extends PainlessParserBaseVisitor<ANode> {
statements.add((AStatement)visit(ctx.dstatement()));
}
return new SSource(scriptClassInfo, settings, sourceName, sourceText, debugStream, (MainMethodReserved)reserved.pop(),
return new SSource(scriptClassInfo, settings, sourceName, debugStream, (MainMethodReserved)reserved.pop(),
location(ctx), functions, globals, statements);
}

View File

@ -19,25 +19,10 @@
package org.elasticsearch.painless.lookup;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistClass;
import org.elasticsearch.painless.spi.WhitelistConstructor;
import org.elasticsearch.painless.spi.WhitelistField;
import org.elasticsearch.painless.spi.WhitelistMethod;
import org.objectweb.asm.Type;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.regex.Pattern;
/**
* The entire API for Painless. Also used as a whitelist for checking for legal
@ -45,18 +30,6 @@ import java.util.regex.Pattern;
*/
public final class PainlessLookup {
private static final Map<String, PainlessMethod> methodCache = new HashMap<>();
private static final Map<String, PainlessField> fieldCache = new HashMap<>();
private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
/** Marker class for def type to be used during type analysis. */
public static final class def {
private def() {
}
}
public static Class<?> getBoxedType(Class<?> clazz) {
if (clazz == boolean.class) {
return Boolean.class;
@ -205,22 +178,6 @@ public final class PainlessLookup {
return clazz.getCanonicalName().replace('$', '.');
}
// Cache key for the shared static method cache: the owner struct name, the method
// name, and the java class name of every argument, concatenated in order.
private static String buildMethodCacheKey(String structName, String methodName, List<Class<?>> arguments) {
    StringBuilder cacheKey = new StringBuilder().append(structName).append(methodName);
    for (Class<?> argumentClass : arguments) {
        cacheKey.append(argumentClass.getName());
    }
    return cacheKey.toString();
}
// Cache key for the shared static field cache: owner struct name + field name + type name.
private static String buildFieldCacheKey(String structName, String fieldName, String typeName) {
    return String.join("", structName, fieldName, typeName);
}
/**
 * Returns all registered Painless structs. Note this returns the live values view
 * of the internal map, not a defensive copy.
 */
public Collection<PainlessClass> getStructs() {
    return javaClassesToPainlessStructs.values();
}
@ -228,652 +185,9 @@ public final class PainlessLookup {
private final Map<String, Class<?>> painlessTypesToJavaClasses;
private final Map<Class<?>, PainlessClass> javaClassesToPainlessStructs;
/**
 * Builds the whitelist-driven lookup of every type, constructor, method, and field
 * available to Painless scripts.
 *
 * Construction runs in strictly ordered phases: register all struct names first
 * (so the second pass can validate member types against them), then add members,
 * then copy inherited members down the hierarchy, precompute runtime classes, and
 * finally freeze each struct so outside users see unmodifiable data.
 *
 * @throws IllegalArgumentException if any whitelist entry is invalid or inconsistent
 */
public PainlessLookup(List<Whitelist> whitelists) {
    painlessTypesToJavaClasses = new HashMap<>();
    javaClassesToPainlessStructs = new HashMap<>();

    // tracks the whitelist resource currently being processed so a load
    // failure can report which entry caused it
    String origin = null;

    // "def" (the dynamic type) is built in rather than whitelisted; it is backed by Object
    painlessTypesToJavaClasses.put("def", def.class);
    javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class)));

    try {
        // first iteration collects all the Painless type names that
        // are used for validation during the second iteration
        for (Whitelist whitelist : whitelists) {
            for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
                String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
                PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));

                // two whitelists may mention the same type name, but only for the same java class
                if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
                    throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
                        "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
                }

                origin = whitelistStruct.origin;
                addStruct(whitelist.javaClassLoader, whitelistStruct);

                // re-read after addStruct so the entry is keyed by the resolved java class
                painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
                javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct);
            }
        }

        // second iteration adds all the constructors, methods, and fields that will
        // be available in Painless along with validating they exist and all their types have
        // been white-listed during the first iteration
        for (Whitelist whitelist : whitelists) {
            for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
                String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');

                for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
                    origin = whitelistConstructor.origin;
                    addConstructor(painlessTypeName, whitelistConstructor);
                }

                for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
                    origin = whitelistMethod.origin;
                    addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
                }

                for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
                    origin = whitelistField.origin;
                    addField(painlessTypeName, whitelistField);
                }
            }
        }
    } catch (Exception exception) {
        throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
    }

    // goes through each Painless struct and determines the inheritance list,
    // and then adds all inherited types to the Painless struct's whitelist
    for (Class<?> javaClass : javaClassesToPainlessStructs.keySet()) {
        PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass);

        List<String> painlessSuperStructs = new ArrayList<>();
        Class<?> javaSuperClass = painlessStruct.clazz.getSuperclass();

        Stack<Class<?>> javaInteraceLookups = new Stack<>();
        javaInteraceLookups.push(painlessStruct.clazz);

        // adds super classes to the inheritance list
        if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
            while (javaSuperClass != null) {
                PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass);

                // only whitelisted ancestors are recorded; others are skipped silently
                if (painlessSuperStruct != null) {
                    painlessSuperStructs.add(painlessSuperStruct.name);
                }

                javaInteraceLookups.push(javaSuperClass);
                javaSuperClass = javaSuperClass.getSuperclass();
            }
        }

        // adds all super interfaces to the inheritance list
        while (javaInteraceLookups.isEmpty() == false) {
            Class<?> javaInterfaceLookup = javaInteraceLookups.pop();

            for (Class<?> javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
                PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface);

                if (painlessInterfaceStruct != null) {
                    String painlessInterfaceStructName = painlessInterfaceStruct.name;

                    if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
                        painlessSuperStructs.add(painlessInterfaceStructName);
                    }

                    // note: only interfaces of whitelisted interfaces are pushed for further traversal
                    for (Class<?> javaPushInterface : javaInterfaceLookup.getInterfaces()) {
                        javaInteraceLookups.push(javaPushInterface);
                    }
                }
            }
        }

        // copies methods and fields from super structs to the parent struct
        copyStruct(painlessStruct.name, painlessSuperStructs);

        // copies methods and fields from Object into interface types
        if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
            PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class);

            if (painlessObjectStruct != null) {
                copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
            }
        }
    }

    // precompute runtime classes
    for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) {
        addRuntimeClass(painlessStruct);
    }

    // copy all structs to make them unmodifiable for outside users:
    for (Map.Entry<Class<?>,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) {
        entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue())));
    }
}
/**
 * Registers a single whitelisted class as a Painless struct, mapping its
 * fully-qualified Painless type name — and, unless only_fqn is set, its short
 * imported name — to the underlying java class.
 *
 * @throws IllegalArgumentException on invalid names, unloadable classes, or
 *         conflicting registrations for the same name
 */
private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) {
    // Painless type names use '.' where java nested classes use '$'
    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
    String importedPainlessTypeName = painlessTypeName;

    if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) {
        throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]");
    }

    // the imported (short) name is everything after the last package separator
    int index = whitelistStruct.javaClassName.lastIndexOf('.');

    if (index != -1) {
        importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.');
    }

    // primitives cannot be loaded via Class.forName, so they are special-cased
    Class<?> javaClass;
    if ("void".equals(whitelistStruct.javaClassName)) javaClass = void.class;
    else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class;
    else if ("byte".equals(whitelistStruct.javaClassName)) javaClass = byte.class;
    else if ("short".equals(whitelistStruct.javaClassName)) javaClass = short.class;
    else if ("char".equals(whitelistStruct.javaClassName)) javaClass = char.class;
    else if ("int".equals(whitelistStruct.javaClassName)) javaClass = int.class;
    else if ("long".equals(whitelistStruct.javaClassName)) javaClass = long.class;
    else if ("float".equals(whitelistStruct.javaClassName)) javaClass = float.class;
    else if ("double".equals(whitelistStruct.javaClassName)) javaClass = double.class;
    else {
        try {
            javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader);
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" +
                " for struct [" + painlessTypeName + "]");
        }
    }

    PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass);

    if (existingStruct == null) {
        PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass));
        painlessTypesToJavaClasses.put(painlessTypeName, javaClass);
        javaClassesToPainlessStructs.put(javaClass, struct);
    } else if (existingStruct.clazz.equals(javaClass) == false) {
        throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " +
            "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " +
            "[" + existingStruct.clazz.getName() + "]");
    }

    if (painlessTypeName.equals(importedPainlessTypeName)) {
        // type has no package, so there is no distinct short name: only_fqn must be set
        if (whitelistStruct.onlyFQNJavaClassName == false) {
            throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package");
        }
    } else {
        Class<?> importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName);

        if (importedJavaClass == null) {
            if (whitelistStruct.onlyFQNJavaClassName == false) {
                // a pre-existing struct must agree on only_fqn; otherwise register the short name too
                if (existingStruct != null) {
                    throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
                }

                painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass);
            }
        } else if (importedJavaClass.equals(javaClass) == false) {
            throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " +
                "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " +
                "and [" + importedJavaClass.getName() + "]");
        } else if (whitelistStruct.onlyFQNJavaClassName) {
            throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
        }
    }
}
/**
 * Resolves a whitelisted constructor via reflection and registers it on an
 * already-registered owner struct.
 *
 * @throws IllegalArgumentException if the owner struct or any parameter type is
 *         not whitelisted, the java constructor does not exist or is inaccessible,
 *         or a conflicting constructor with the same arity is already registered
 */
private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
    PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));

    if (ownerStruct == null) {
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
            "parameters " + whitelistConstructor.painlessParameterTypeNames);
    }

    // resolve each painless parameter type name to both the painless class (for the
    // PainlessMethod signature) and, via defClassToObjectClass, the class used for
    // the java reflection lookup
    List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
    Class<?>[] javaClassParameters = new Class<?>[whitelistConstructor.painlessParameterTypeNames.size()];

    for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
        String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);

        try {
            Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);

            painlessParametersTypes.add(painlessParameterClass);
            javaClassParameters[parameterCount] = defClassToObjectClass(painlessParameterClass);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
                "with owner struct [" + ownerStructName + "] and constructor parameters " +
                whitelistConstructor.painlessParameterTypeNames, iae);
        }
    }

    java.lang.reflect.Constructor<?> javaConstructor;

    try {
        javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
    } catch (NoSuchMethodException exception) {
        throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
            " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
    }

    // constructors are keyed by arity under the reserved name <init>
    PainlessMethodKey painlessMethodKey = new PainlessMethodKey("<init>", whitelistConstructor.painlessParameterTypeNames.size());
    PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);

    if (painlessConstructor == null) {
        org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
        MethodHandle javaHandle;

        try {
            javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
        } catch (IllegalAccessException exception) {
            throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
                " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames);
        }

        // the shared static methodCache dedupes PainlessMethod instances across lookups
        painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "<init>", painlessParametersTypes),
            key -> new PainlessMethod("<init>", ownerStruct, null, void.class, painlessParametersTypes,
                asmConstructor, javaConstructor.getModifiers(), javaHandle));

        ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
    } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){
        throw new IllegalArgumentException(
            "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
            "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
    }
}
/**
 * Whitelists a single method on the owner struct.
 *
 * A method may be static, a member method, or an "augmented" method: a static method on a
 * separate class whose first parameter is the owner type, exposed to scripts as if it were
 * a member method of the owner. Validates the method name, resolves all Painless parameter
 * and return types, looks the method up reflectively, and registers a (cached)
 * {@code PainlessMethod} on the owner struct.
 *
 * @throws IllegalArgumentException if the owner struct or any referenced type is not
 *         whitelisted, the method cannot be found, the return type does not match, or a
 *         conflicting duplicate registration exists
 */
private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
    PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));

    if (ownerStruct == null) {
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
            "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
    }

    if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
        throw new IllegalArgumentException("invalid method name" +
            " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
    }

    // Resolve the augmentation class, if any; its presence shifts reflective parameters by one
    // because the owner instance becomes the first argument of the static implementation.
    Class<?> javaAugmentedClass;

    if (whitelistMethod.javaAugmentedClassName != null) {
        try {
            javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
                "not found for method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
        }
    } else {
        javaAugmentedClass = null;
    }

    int augmentedOffset = javaAugmentedClass == null ? 0 : 1;

    List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
    Class<?>[] javaClassParameters = new Class<?>[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];

    if (javaAugmentedClass != null) {
        javaClassParameters[0] = ownerStruct.clazz;
    }

    // Resolve every Painless parameter type name to its backing Java class.
    for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
        String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);

        try {
            Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);

            painlessParametersTypes.add(painlessParameterClass);
            javaClassParameters[parameterCount + augmentedOffset] = defClassToObjectClass(painlessParameterClass);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
                "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
        }
    }

    // Augmented methods are implemented on the augmentation class, not the owner itself.
    Class<?> javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
    java.lang.reflect.Method javaMethod;

    try {
        javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
    } catch (NoSuchMethodException nsme) {
        throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
            "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
            javaImplClass.getName() + "]", nsme);
    }

    Class<?> painlessReturnClass;

    try {
        painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
            "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
            "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
    }

    // The declared Painless return type must match the reflective return type exactly.
    if (javaMethod.getReturnType() != defClassToObjectClass(painlessReturnClass)) {
        throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
            "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
            "method with name [" + whitelistMethod.javaMethodName + "] " +
            "and parameters " + whitelistMethod.painlessParameterTypeNames);
    }

    PainlessMethodKey painlessMethodKey =
        new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());

    // Non-augmented static methods go into staticMethods; everything else (including augmented
    // statics, which are member methods from the script's point of view) goes into methods.
    if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
        PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);

        if (painlessMethod == null) {
            org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
            MethodHandle javaMethodHandle;

            try {
                javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
            } catch (IllegalAccessException exception) {
                // Chain the cause so the underlying access failure is not lost.
                throw new IllegalArgumentException("method handle not found for method with name " +
                    "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames,
                    exception);
            }

            painlessMethod = methodCache.computeIfAbsent(
                buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
                key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass,
                    painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));

            ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
            painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
            throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
                "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
                "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
                "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
        }
    } else {
        PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);

        if (painlessMethod == null) {
            org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
            MethodHandle javaMethodHandle;

            try {
                javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
            } catch (IllegalAccessException exception) {
                // Chain the cause so the underlying access failure is not lost.
                throw new IllegalArgumentException("method handle not found for method with name " +
                    "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames,
                    exception);
            }

            painlessMethod = methodCache.computeIfAbsent(
                buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
                key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass,
                    painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));

            ownerStruct.methods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
            painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
            throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
                "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
                "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
                "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
        }
    }
}
/**
 * Whitelists a single field on the owner struct.
 *
 * Static fields must be final and are registered as static members with no accessors;
 * instance fields are registered with {@link MethodHandle} getter/setter accessors.
 *
 * @throws IllegalArgumentException if the owner struct or field type is not whitelisted,
 *         the field does not exist or is inaccessible, a static field is non-final, or a
 *         conflicting duplicate registration exists
 */
private void addField(String ownerStructName, WhitelistField whitelistField) {
    PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));

    if (ownerStruct == null) {
        // Fixed message: this is a field registration, not a method registration.
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for field with " +
            "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
    }

    if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
        // Fixed message: report the offending field name, not its type name.
        throw new IllegalArgumentException("invalid field name " +
            "[" + whitelistField.javaFieldName + "] for owner struct [" + ownerStructName + "].");
    }

    java.lang.reflect.Field javaField;

    try {
        javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
    } catch (NoSuchFieldException exception) {
        // Chain the cause so the underlying reflection failure is not lost.
        throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
            "not found for class [" + ownerStruct.clazz.getName() + "].", exception);
    }

    Class<?> painlessFieldClass;

    try {
        painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " +
            "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
    }

    if (Modifier.isStatic(javaField.getModifiers())) {
        // Static fields are exposed as constants, so mutability is rejected.
        if (Modifier.isFinal(javaField.getModifiers()) == false) {
            throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
                "with owner struct [" + ownerStruct.name + "] is not final");
        }

        PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);

        if (painlessField == null) {
            painlessField = fieldCache.computeIfAbsent(
                buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
                key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
                    ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null));

            ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
        } else if (painlessField.clazz != painlessFieldClass) {
            throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
                "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
        }
    } else {
        MethodHandle javaMethodHandleGetter;
        MethodHandle javaMethodHandleSetter;

        try {
            // This branch only handles non-static fields, so accessors are always created
            // (the previous re-check of Modifier.isStatic here was redundant).
            javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
            javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
        } catch (IllegalAccessException exception) {
            // Chain the cause so the underlying access failure is not lost.
            throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
                " not found for class [" + ownerStruct.clazz.getName() + "].", exception);
        }

        PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);

        if (painlessField == null) {
            painlessField = fieldCache.computeIfAbsent(
                buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
                key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
                    ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));

            ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
        } else if (painlessField.clazz != painlessFieldClass) {
            throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
                "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
        }
    }
}
/**
 * Copies inherited methods and fields from each "child" struct into the owner struct.
 *
 * Naming note: despite the name, each entry of {@code children} is a whitelisted SUPER type
 * of {@code struct} (the assignability check below enforces child.clazz is assignable FROM
 * owner.clazz). Members are only copied when the owner does not already define them, so
 * the owner's own declarations always win.
 *
 * @param struct   Painless type name of the struct receiving the copied members
 * @param children Painless type names of the super types to copy from
 * @throws IllegalArgumentException if either struct name is not whitelisted
 * @throws ClassCastException if a listed type is not actually a super type of the owner
 */
private void copyStruct(String struct, List<String> children) {
    final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct));

    if (owner == null) {
        throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
    }

    for (int count = 0; count < children.size(); ++count) {
        final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count)));

        if (child == null) {
            throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
                " not defined for copy to owner struct [" + owner.name + "].");
        }

        if (!child.clazz.isAssignableFrom(owner.clazz)) {
            throw new ClassCastException("Child struct [" + child.name + "]" +
                " is not a super type of owner struct [" + owner.name + "] in copy.");
        }

        // Copy every inherited method the owner does not already declare.
        for (Map.Entry<PainlessMethodKey,PainlessMethod> kvPair : child.methods.entrySet()) {
            PainlessMethodKey methodKey = kvPair.getKey();
            PainlessMethod method = kvPair.getValue();
            if (owner.methods.get(methodKey) == null) {
                // TODO: some of these are no longer valid or outright don't work
                // TODO: since classes may not come from the Painless classloader
                // TODO: and it was dependent on the order of the extends which
                // TODO: which no longer exists since this is generated automatically
                // sanity check, look for missing covariant/generic override
                /*if (owner.clazz.isInterface() && child.clazz == Object.class) {
                    // ok
                } else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
                    // ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!)
                } else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
                    // ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
                    // https://bugs.openjdk.java.net/browse/JDK-8072746
                } else {
                    try {
                        // TODO: we *have* to remove all these public members and use getter methods to encapsulate!
                        final Class<?> impl;
                        final Class<?> arguments[];
                        if (method.augmentation != null) {
                            impl = method.augmentation;
                            arguments = new Class<?>[method.arguments.size() + 1];
                            arguments[0] = method.owner.clazz;
                            for (int i = 0; i < method.arguments.size(); i++) {
                                arguments[i + 1] = method.arguments.get(i).clazz;
                            }
                        } else {
                            impl = owner.clazz;
                            arguments = new Class<?>[method.arguments.size()];
                            for (int i = 0; i < method.arguments.size(); i++) {
                                arguments[i] = method.arguments.get(i).clazz;
                            }
                        }
                        java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
                        if (m.getReturnType() != method.rtn.clazz) {
                            throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
                        }
                        if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
                            // its a bridge in the destination, but not in the source, but it might still be ok, check generics:
                            java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
                            if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
                                throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
                            }
                        }
                    } catch (ReflectiveOperationException e) {
                        throw new AssertionError(e);
                    }
                }*/
                owner.methods.put(methodKey, method);
            }
        }

        // Copy every inherited member field the owner does not already declare,
        // rebinding the field to the owner struct.
        for (PainlessField field : child.members.values()) {
            if (owner.members.get(field.name) == null) {
                owner.members.put(field.name,
                    new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter));
            }
        }
    }
}
/**
 * Precomputes a more efficient structure for dynamic method/field access.
 *
 * Derives JavaBeans-style shortcut names from whitelisted methods — getFoo()/isFoo()
 * expose "foo" as a readable property and setFoo(x) a writable one — then registers
 * direct field accessors, which take precedence over the derived names.
 */
private void addRuntimeClass(final PainlessClass struct) {
    // add all getters/setters
    for (Map.Entry<PainlessMethodKey, PainlessMethod> entry : struct.methods.entrySet()) {
        String methodName = entry.getKey().name;
        PainlessMethod painlessMethod = entry.getValue();

        if (painlessMethod.arguments.size() == 0) {
            if (methodName.startsWith("get") && methodName.length() > 3 && Character.isUpperCase(methodName.charAt(3))) {
                String property = Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4);
                struct.getters.putIfAbsent(property, painlessMethod.handle);
            } else if (methodName.startsWith("is") && methodName.length() > 2 && Character.isUpperCase(methodName.charAt(2))) {
                String property = Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3);
                struct.getters.putIfAbsent(property, painlessMethod.handle);
            }
        }

        if (painlessMethod.arguments.size() == 1
                && methodName.startsWith("set") && methodName.length() > 3 && Character.isUpperCase(methodName.charAt(3))) {
            String property = Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4);
            struct.setters.putIfAbsent(property, painlessMethod.handle);
        }
    }

    // add all members; plain put() so direct field access overrides any derived accessor name
    for (Map.Entry<String, PainlessField> member : struct.members.entrySet()) {
        struct.getters.put(member.getKey(), member.getValue().getter);
        struct.setters.put(member.getKey(), member.getValue().setter);
    }
}
/**
 * Computes the functional interface method for a class, or returns null.
 *
 * A class qualifies when it is an interface with exactly one abstract method after
 * excluding default methods, static methods, and methods inherited from Object.
 *
 * @throws IllegalArgumentException if the class is annotated {@code @FunctionalInterface}
 *         but does not have exactly one qualifying method, or the single method found via
 *         reflection is not whitelisted with a matching signature
 */
private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) {
    if (!clazz.clazz.isInterface()) {
        return null;
    }
    // if its marked with this annotation, we fail if the conditions don't hold (means whitelist bug)
    // otherwise, this annotation is pretty useless.
    boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);
    List<java.lang.reflect.Method> methods = new ArrayList<>();
    for (java.lang.reflect.Method m : clazz.clazz.getMethods()) {
        // default interface methods don't count
        if (m.isDefault()) {
            continue;
        }
        // static methods don't count
        if (Modifier.isStatic(m.getModifiers())) {
            continue;
        }
        // if its from Object, it doesn't count
        try {
            Object.class.getMethod(m.getName(), m.getParameterTypes());
            continue;
        } catch (ReflectiveOperationException e) {
            // it counts
        }
        methods.add(m);
    }
    if (methods.size() != 1) {
        if (hasAnnotation) {
            throw new IllegalArgumentException("Class: " + clazz.name +
                " is marked with FunctionalInterface but doesn't fit the bill: " + methods);
        }
        return null;
    }
    // inspect the one method found from the reflection API, it should match the whitelist!
    java.lang.reflect.Method oneMethod = methods.get(0);
    PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));
    if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
        throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
            "method is not whitelisted!");
    }
    return painless;
/**
 * Package-private constructor: captures the finished type mappings. Both maps are wrapped
 * unmodifiable so the lookup is read-only once built.
 *
 * @param painlessTypesToJavaClasses   Painless type name -> backing Java class
 * @param javaClassesToPainlessStructs Java class -> its Painless struct
 */
PainlessLookup(Map<String, Class<?>> painlessTypesToJavaClasses, Map<Class<?>, PainlessClass> javaClassesToPainlessStructs) {
    this.painlessTypesToJavaClasses = Collections.unmodifiableMap(painlessTypesToJavaClasses);
    this.javaClassesToPainlessStructs = Collections.unmodifiableMap(javaClassesToPainlessStructs);
}
public boolean isSimplePainlessType(String painlessType) {

View File

@ -0,0 +1,774 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.lookup;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistClass;
import org.elasticsearch.painless.spi.WhitelistConstructor;
import org.elasticsearch.painless.spi.WhitelistField;
import org.elasticsearch.painless.spi.WhitelistMethod;
import org.objectweb.asm.Type;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.regex.Pattern;
public class PainlessLookupBuilder {
// Valid Painless identifier: letter or underscore first, then letters, digits, dots, underscores.
private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");

// Process-wide caches deduplicating PainlessMethod/PainlessField instances across builds.
// NOTE(review): plain HashMaps shared statically with no synchronization — assumes lookups
// are never built concurrently on multiple threads; confirm before relying on that.
private static final Map<String, PainlessMethod> methodCache = new HashMap<>();
private static final Map<String, PainlessField> fieldCache = new HashMap<>();
/**
 * Builds the method-cache key: the owning struct name, the method name, and each argument's
 * fully qualified class name concatenated in declaration order.
 */
private static String buildMethodCacheKey(String structName, String methodName, List<Class<?>> arguments) {
    StringBuilder cacheKey = new StringBuilder(structName).append(methodName);

    for (Class<?> parameterType : arguments) {
        cacheKey.append(parameterType.getName());
    }

    return cacheKey.toString();
}
/**
 * Builds the field-cache key: the owning struct name, the field name, and the field's
 * Painless type name concatenated.
 */
private static String buildFieldCacheKey(String structName, String fieldName, String typeName) {
    return new StringBuilder().append(structName).append(fieldName).append(typeName).toString();
}
// Painless type name (e.g. "String", "java.lang.String", "def") -> backing Java class.
private final Map<String, Class<?>> painlessTypesToJavaClasses;
// Java class -> its Painless struct (the methods/fields visible to scripts).
private final Map<Class<?>, PainlessClass> javaClassesToPainlessStructs;
/**
 * Builds the complete Painless type lookup from the given whitelists.
 *
 * Processing order: (1) register every whitelisted struct name, (2) register all
 * constructors/methods/fields (validating that every referenced type was registered in
 * pass 1), (3) walk each struct's superclass/interface graph and copy inherited members,
 * (4) precompute runtime getter/setter shortcuts, and (5) freeze every struct so the
 * result is immutable to outside users.
 *
 * @throws IllegalArgumentException wrapping any failure, with the origin of the last
 *         whitelist entry processed so errors point at the offending whitelist line
 */
public PainlessLookupBuilder(List<Whitelist> whitelists) {
    painlessTypesToJavaClasses = new HashMap<>();
    javaClassesToPainlessStructs = new HashMap<>();

    String origin = null;

    // "def" is the special dynamic type; it is registered up front and backed by Object.
    painlessTypesToJavaClasses.put("def", def.class);
    javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class)));

    try {
        // first iteration collects all the Painless type names that
        // are used for validation during the second iteration
        for (Whitelist whitelist : whitelists) {
            for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
                String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
                PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));

                if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
                    throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
                        "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
                }

                origin = whitelistStruct.origin;
                addStruct(whitelist.javaClassLoader, whitelistStruct);

                painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
                javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct);
            }
        }

        // second iteration adds all the constructors, methods, and fields that will
        // be available in Painless along with validating they exist and all their types have
        // been white-listed during the first iteration
        for (Whitelist whitelist : whitelists) {
            for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
                String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');

                for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
                    origin = whitelistConstructor.origin;
                    addConstructor(painlessTypeName, whitelistConstructor);
                }

                for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
                    origin = whitelistMethod.origin;
                    addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
                }

                for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
                    origin = whitelistField.origin;
                    addField(painlessTypeName, whitelistField);
                }
            }
        }
    } catch (Exception exception) {
        // origin tracks the last whitelist entry processed, so the error names its source
        throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
    }

    // goes through each Painless struct and determines the inheritance list,
    // and then adds all inherited types to the Painless struct's whitelist
    for (Class<?> javaClass : javaClassesToPainlessStructs.keySet()) {
        PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass);

        List<String> painlessSuperStructs = new ArrayList<>();
        Class<?> javaSuperClass = painlessStruct.clazz.getSuperclass();

        // NOTE(review): "javaInteraceLookups" is a long-standing typo for "javaInterfaceLookups"
        Stack<Class<?>> javaInteraceLookups = new Stack<>();
        javaInteraceLookups.push(painlessStruct.clazz);

        // adds super classes to the inheritance list
        if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
            while (javaSuperClass != null) {
                PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass);

                if (painlessSuperStruct != null) {
                    painlessSuperStructs.add(painlessSuperStruct.name);
                }

                // each superclass is also a root for the interface walk below
                javaInteraceLookups.push(javaSuperClass);
                javaSuperClass = javaSuperClass.getSuperclass();
            }
        }

        // adds all super interfaces to the inheritance list
        while (javaInteraceLookups.isEmpty() == false) {
            Class<?> javaInterfaceLookup = javaInteraceLookups.pop();

            for (Class<?> javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
                PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface);

                if (painlessInterfaceStruct != null) {
                    String painlessInterfaceStructName = painlessInterfaceStruct.name;

                    if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
                        painlessSuperStructs.add(painlessInterfaceStructName);
                    }

                    for (Class<?> javaPushInterface : javaInterfaceLookup.getInterfaces()) {
                        javaInteraceLookups.push(javaPushInterface);
                    }
                }
            }
        }

        // copies methods and fields from super structs to the parent struct
        copyStruct(painlessStruct.name, painlessSuperStructs);

        // copies methods and fields from Object into interface types
        if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
            PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class);

            if (painlessObjectStruct != null) {
                copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
            }
        }
    }

    // precompute runtime classes
    for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) {
        addRuntimeClass(painlessStruct);
    }

    // copy all structs to make them unmodifiable for outside users:
    for (Map.Entry<Class<?>,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) {
        entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue())));
    }
}
/**
 * Registers a whitelisted class as a Painless struct (type).
 *
 * The Painless type name is the fully qualified Java class name with '$' replaced by '.'.
 * Unless {@code only_fqn} is set, the simple class name is also registered as an import
 * alias. Primitive type names are mapped directly without classloading.
 *
 * @throws IllegalArgumentException if the type name is invalid, the class cannot be
 *         loaded, or the name/alias would map to more than one Java class
 */
private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) {
    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
    String importedPainlessTypeName = painlessTypeName;

    if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) {
        throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]");
    }

    // The import alias is the simple (unqualified) class name.
    int index = whitelistStruct.javaClassName.lastIndexOf('.');

    if (index != -1) {
        importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.');
    }

    // Primitives cannot be loaded via Class.forName, so they are mapped explicitly.
    Class<?> javaClass;

    if      ("void".equals(whitelistStruct.javaClassName))    javaClass = void.class;
    else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class;
    else if ("byte".equals(whitelistStruct.javaClassName))    javaClass = byte.class;
    else if ("short".equals(whitelistStruct.javaClassName))   javaClass = short.class;
    else if ("char".equals(whitelistStruct.javaClassName))    javaClass = char.class;
    else if ("int".equals(whitelistStruct.javaClassName))     javaClass = int.class;
    else if ("long".equals(whitelistStruct.javaClassName))    javaClass = long.class;
    else if ("float".equals(whitelistStruct.javaClassName))   javaClass = float.class;
    else if ("double".equals(whitelistStruct.javaClassName))  javaClass = double.class;
    else {
        try {
            javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader);
        } catch (ClassNotFoundException cnfe) {
            // Chain the cause so the underlying classloading failure is not lost.
            throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" +
                " for struct [" + painlessTypeName + "]", cnfe);
        }
    }

    PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass);

    if (existingStruct == null) {
        PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass));
        painlessTypesToJavaClasses.put(painlessTypeName, javaClass);
        javaClassesToPainlessStructs.put(javaClass, struct);
    } else if (existingStruct.clazz.equals(javaClass) == false) {
        throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " +
            "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " +
            "[" + existingStruct.clazz.getName() + "]");
    }

    if (painlessTypeName.equals(importedPainlessTypeName)) {
        // Class has no package: the "imported" name is the same name, so only_fqn is mandatory.
        if (whitelistStruct.onlyFQNJavaClassName == false) {
            throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package");
        }
    } else {
        Class<?> importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName);

        if (importedJavaClass == null) {
            if (whitelistStruct.onlyFQNJavaClassName == false) {
                if (existingStruct != null) {
                    throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
                }

                painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass);
            }
        } else if (importedJavaClass.equals(javaClass) == false) {
            throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " +
                "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " +
                "and [" + importedJavaClass.getName() + "]");
        } else if (whitelistStruct.onlyFQNJavaClassName) {
            throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
        }
    }
}
/**
 * Whitelists a single constructor on the owner struct.
 *
 * Resolves every Painless parameter type, looks the constructor up reflectively, and
 * registers a (cached) {@code PainlessMethod} keyed as {@code <init>} with the arity.
 *
 * @throws IllegalArgumentException if the owner struct or a parameter type is not
 *         whitelisted, the constructor does not exist or is inaccessible, or a
 *         conflicting duplicate registration exists
 */
private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
    PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));

    if (ownerStruct == null) {
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
            "parameters " + whitelistConstructor.painlessParameterTypeNames);
    }

    List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
    Class<?>[] javaClassParameters = new Class<?>[whitelistConstructor.painlessParameterTypeNames.size()];

    for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
        String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);

        try {
            Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);

            painlessParametersTypes.add(painlessParameterClass);
            javaClassParameters[parameterCount] = PainlessLookup.defClassToObjectClass(painlessParameterClass);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
                "with owner struct [" + ownerStructName + "] and constructor parameters " +
                whitelistConstructor.painlessParameterTypeNames, iae);
        }
    }

    java.lang.reflect.Constructor<?> javaConstructor;

    try {
        javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
    } catch (NoSuchMethodException exception) {
        throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
            " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
    }

    PainlessMethodKey painlessMethodKey = new PainlessMethodKey("<init>", whitelistConstructor.painlessParameterTypeNames.size());
    PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);

    if (painlessConstructor == null) {
        org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
        MethodHandle javaHandle;

        try {
            javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
        } catch (IllegalAccessException exception) {
            // Chain the cause so the underlying access failure is not lost.
            throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
                " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
        }

        painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "<init>", painlessParametersTypes),
            key -> new PainlessMethod("<init>", ownerStruct, null, void.class, painlessParametersTypes,
                asmConstructor, javaConstructor.getModifiers(), javaHandle));

        ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
    } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false) {
        throw new IllegalArgumentException(
            "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
            "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
    }
}
/**
 * Whitelists a single method on the owner struct.
 * <p>
 * A method may be "augmented": implemented as a static method on a separate class whose first
 * parameter is the owner type. Augmented methods are registered as member methods of the owner.
 * Resolved methods are cached so identical whitelist entries across contexts share one
 * {@link PainlessMethod}.
 *
 * @param whitelistClassLoader loader used to resolve the augmented class, if any
 * @param ownerStructName      painless name of the struct that owns the method
 * @param whitelistMethod      the whitelist entry describing the method
 * @throws IllegalArgumentException if the owner struct, parameter types, return type, or backing
 *         Java method cannot be resolved, or if a conflicting duplicate is already registered
 */
private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
    PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));

    if (ownerStruct == null) {
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
            "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
    }

    if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
        throw new IllegalArgumentException("invalid method name" +
            " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
    }

    // Resolve the augmented class eagerly; null means the method lives on the owner class itself.
    Class<?> javaAugmentedClass;

    if (whitelistMethod.javaAugmentedClassName != null) {
        try {
            javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
                "not found for method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
        }
    } else {
        javaAugmentedClass = null;
    }

    // Augmented methods take the owner instance as an extra leading Java parameter.
    int augmentedOffset = javaAugmentedClass == null ? 0 : 1;

    List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
    Class<?>[] javaClassParameters = new Class<?>[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];

    if (javaAugmentedClass != null) {
        javaClassParameters[0] = ownerStruct.clazz;
    }

    for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
        String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);

        try {
            Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);

            painlessParametersTypes.add(painlessParameterClass);
            // def parameters are Object in the Java signature.
            javaClassParameters[parameterCount + augmentedOffset] = PainlessLookup.defClassToObjectClass(painlessParameterClass);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
                "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
        }
    }

    Class<?> javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
    java.lang.reflect.Method javaMethod;

    try {
        javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
    } catch (NoSuchMethodException nsme) {
        throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
            "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
            javaImplClass.getName() + "]", nsme);
    }

    Class<?> painlessReturnClass;

    try {
        painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
            "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
            "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
    }

    if (javaMethod.getReturnType() != PainlessLookup.defClassToObjectClass(painlessReturnClass)) {
        throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
            "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
            "method with name [" + whitelistMethod.javaMethodName + "] " +
            "and parameters " + whitelistMethod.painlessParameterTypeNames);
    }

    PainlessMethodKey painlessMethodKey =
        new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());

    if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
        PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);

        if (painlessMethod == null) {
            org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
            MethodHandle javaMethodHandle;

            try {
                javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
            } catch (IllegalAccessException exception) {
                // Fix: chain the cause so access failures remain diagnosable.
                throw new IllegalArgumentException("method handle not found for method with name " +
                    "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames,
                    exception);
            }

            painlessMethod = methodCache.computeIfAbsent(
                buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
                key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass,
                    painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
            ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
            throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
                "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
                "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
                "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
        }
    } else {
        PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);

        if (painlessMethod == null) {
            org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
            MethodHandle javaMethodHandle;

            try {
                javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
            } catch (IllegalAccessException exception) {
                // Fix: chain the cause so access failures remain diagnosable.
                throw new IllegalArgumentException("method handle not found for method with name " +
                    "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames,
                    exception);
            }

            painlessMethod = methodCache.computeIfAbsent(
                buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
                key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass,
                    painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
            ownerStruct.methods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
            throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
                "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
                "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
                "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
        }
    }
}
/**
 * Whitelists a single field on the owner struct.
 * <p>
 * Static fields must be {@code final} and are registered without accessor handles; instance
 * fields are registered with getter/setter {@link MethodHandle}s for dynamic access. Resolved
 * fields are cached so identical whitelist entries across contexts share one {@link PainlessField}.
 *
 * @param ownerStructName painless name of the struct that owns the field
 * @param whitelistField  the whitelist entry describing the field
 * @throws IllegalArgumentException if the owner struct, field, or field type cannot be resolved,
 *         if a static field is non-final, or if a conflicting duplicate is already registered
 */
private void addField(String ownerStructName, WhitelistField whitelistField) {
    PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));

    if (ownerStruct == null) {
        // Fix: message previously said "method" for a field entry.
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for field with " +
            "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
    }

    if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
        // Fix: message previously printed the field's type name instead of the invalid field name.
        throw new IllegalArgumentException("invalid field name " +
            "[" + whitelistField.javaFieldName + "] for owner struct [" + ownerStructName + "].");
    }

    java.lang.reflect.Field javaField;

    try {
        javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
    } catch (NoSuchFieldException exception) {
        // Fix: chain the cause so reflection failures remain diagnosable.
        throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
            "not found for class [" + ownerStruct.clazz.getName() + "].", exception);
    }

    Class<?> painlessFieldClass;

    try {
        painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " +
            "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
    }

    if (Modifier.isStatic(javaField.getModifiers())) {
        // Static fields are exposed as constants, so mutability is rejected up front.
        if (Modifier.isFinal(javaField.getModifiers()) == false) {
            throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
                "with owner struct [" + ownerStruct.name + "] is not final");
        }

        PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);

        if (painlessField == null) {
            painlessField = fieldCache.computeIfAbsent(
                buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
                key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
                    ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null));
            ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
        } else if (painlessField.clazz != painlessFieldClass) {
            throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
                "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
        }
    } else {
        MethodHandle javaMethodHandleGetter;
        MethodHandle javaMethodHandleSetter;

        try {
            // This branch only handles instance fields (the static case returned above), so the
            // previous redundant isStatic re-check has been removed; accessors are always built.
            javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
            javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
        } catch (IllegalAccessException exception) {
            // Fix: chain the cause so access failures remain diagnosable.
            throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
                " not found for class [" + ownerStruct.clazz.getName() + "].", exception);
        }

        PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);

        if (painlessField == null) {
            painlessField = fieldCache.computeIfAbsent(
                buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
                key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
                    ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));
            ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
        } else if (painlessField.clazz != painlessFieldClass) {
            throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
                "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
        }
    }
}
/**
 * Copies inherited members into a struct: for each listed child (a supertype of {@code struct}),
 * any method or member field not already present on the owner is copied down so lookups on the
 * owner resolve inherited members directly.
 *
 * @param struct   painless name of the struct receiving the copies
 * @param children painless names of structs to copy from; each must be a supertype of the owner
 * @throws IllegalArgumentException if the owner or a child struct is not defined
 * @throws ClassCastException       if a child is not actually a supertype of the owner
 */
private void copyStruct(String struct, List<String> children) {
final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct));
if (owner == null) {
throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
}
for (int count = 0; count < children.size(); ++count) {
final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count)));
if (child == null) {
throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
" not defined for copy to owner struct [" + owner.name + "].");
}
// "child" here is an inheritance parent: the owner's class must be assignable to it.
if (!child.clazz.isAssignableFrom(owner.clazz)) {
throw new ClassCastException("Child struct [" + child.name + "]" +
" is not a super type of owner struct [" + owner.name + "] in copy.");
}
// Copy methods the owner does not define itself; owner-defined methods win.
for (Map.Entry<PainlessMethodKey,PainlessMethod> kvPair : child.methods.entrySet()) {
PainlessMethodKey methodKey = kvPair.getKey();
PainlessMethod method = kvPair.getValue();
if (owner.methods.get(methodKey) == null) {
// TODO: some of these are no longer valid or outright don't work
// TODO: since classes may not come from the Painless classloader
// TODO: and it was dependent on the order of the extends which
// TODO: which no longer exists since this is generated automatically
// sanity check, look for missing covariant/generic override
/*if (owner.clazz.isInterface() && child.clazz == Object.class) {
// ok
} else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
// ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!)
} else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
// ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
// https://bugs.openjdk.java.net/browse/JDK-8072746
} else {
try {
// TODO: we *have* to remove all these public members and use getter methods to encapsulate!
final Class<?> impl;
final Class<?> arguments[];
if (method.augmentation != null) {
impl = method.augmentation;
arguments = new Class<?>[method.arguments.size() + 1];
arguments[0] = method.owner.clazz;
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i + 1] = method.arguments.get(i).clazz;
}
} else {
impl = owner.clazz;
arguments = new Class<?>[method.arguments.size()];
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i] = method.arguments.get(i).clazz;
}
}
java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
if (m.getReturnType() != method.rtn.clazz) {
throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
}
if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
// its a bridge in the destination, but not in the source, but it might still be ok, check generics:
java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
}
}
} catch (ReflectiveOperationException e) {
throw new AssertionError(e);
}
}*/
owner.methods.put(methodKey, method);
}
}
// Copy member fields the owner does not define itself, re-owned by the owner struct.
for (PainlessField field : child.members.values()) {
if (owner.members.get(field.name) == null) {
owner.members.put(field.name,
new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter));
}
}
}
}
/**
 * Precomputes the struct's dynamic-access tables: derives JavaBean-style property names from
 * whitelisted getX/isX/setX methods and registers their method handles, then registers direct
 * handles for every member field. Method-derived entries use {@code putIfAbsent} so the first
 * registration wins; field handles are registered unconditionally.
 */
private void addRuntimeClass(final PainlessClass struct) {
    // Derive property getters/setters from accessor-shaped methods.
    for (Map.Entry<PainlessMethodKey, PainlessMethod> entry : struct.methods.entrySet()) {
        final String methodName = entry.getKey().name;
        final PainlessMethod painlessMethod = entry.getValue();
        final int argCount = painlessMethod.arguments.size();

        if (argCount == 0 && methodName.startsWith("get") && methodName.length() > 3
                && Character.isUpperCase(methodName.charAt(3))) {
            // getFoo() -> "foo"
            final String property = Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4);
            struct.getters.putIfAbsent(property, painlessMethod.handle);
        } else if (argCount == 0 && methodName.startsWith("is") && methodName.length() > 2
                && Character.isUpperCase(methodName.charAt(2))) {
            // isBar() -> "bar"
            final String property = Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3);
            struct.getters.putIfAbsent(property, painlessMethod.handle);
        }

        if (argCount == 1 && methodName.startsWith("set") && methodName.length() > 3
                && Character.isUpperCase(methodName.charAt(3))) {
            // setBaz(v) -> "baz"
            final String property = Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4);
            struct.setters.putIfAbsent(property, painlessMethod.handle);
        }
    }

    // Register direct field access for every member.
    for (Map.Entry<String, PainlessField> member : struct.members.entrySet()) {
        struct.getters.put(member.getKey(), member.getValue().getter);
        struct.setters.put(member.getKey(), member.getValue().setter);
    }
}
/**
 * Computes the single abstract method of a functional interface, or returns {@code null}
 * when the class is not an interface or is not functional. If the interface carries the
 * {@link FunctionalInterface} annotation but does not actually have exactly one abstract
 * method, that is treated as a whitelist bug and an exception is thrown.
 */
private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) {
    if (clazz.clazz.isInterface() == false) {
        return null;
    }

    // With the annotation present we fail hard on violations; without it, the annotation
    // carries no extra meaning here.
    boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);

    List<java.lang.reflect.Method> abstractMethods = new ArrayList<>();

    for (java.lang.reflect.Method candidate : clazz.clazz.getMethods()) {
        // Default and static interface methods can never be the single abstract method.
        if (candidate.isDefault() || Modifier.isStatic(candidate.getModifiers())) {
            continue;
        }

        // Methods inherited from Object (equals, hashCode, toString, ...) do not count either.
        boolean declaredOnObject;
        try {
            Object.class.getMethod(candidate.getName(), candidate.getParameterTypes());
            declaredOnObject = true;
        } catch (ReflectiveOperationException e) {
            declaredOnObject = false;
        }

        if (declaredOnObject == false) {
            abstractMethods.add(candidate);
        }
    }

    if (abstractMethods.size() != 1) {
        if (hasAnnotation) {
            throw new IllegalArgumentException("Class: " + clazz.name +
                " is marked with FunctionalInterface but doesn't fit the bill: " + abstractMethods);
        }
        return null;
    }

    // The single reflected method must match a whitelisted painless method exactly.
    java.lang.reflect.Method oneMethod = abstractMethods.get(0);
    PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));

    if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
        throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
            "method is not whitelisted!");
    }

    return painless;
}
/**
 * Resolves a painless type name (e.g. {@code int}, {@code List}, {@code long[][]}) to its Java
 * class. Non-array names are a direct map lookup; array names are validated, reduced to their
 * element type, and resolved via a JVM array descriptor with {@link Class#forName(String)}.
 *
 * @param painlessType the painless type name to resolve
 * @return the corresponding Java class
 * @throws IllegalArgumentException if the name is malformed or refers to an unknown type
 */
public Class<?> getJavaClassFromPainlessType(String painlessType) {
    Class<?> javaClass = painlessTypesToJavaClasses.get(painlessType);

    if (javaClass != null) {
        return javaClass;
    }

    int arrayDimensions = 0;
    int arrayIndex = painlessType.indexOf('[');

    if (arrayIndex != -1) {
        int length = painlessType.length();

        // Everything after the element type name must be a well-formed run of "[]" pairs;
        // count the array dimensions while validating.
        while (arrayIndex < length) {
            if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') {
                ++arrayDimensions;
            } else {
                throw new IllegalArgumentException("invalid painless type [" + painlessType + "].");
            }
        }

        painlessType = painlessType.substring(0, painlessType.indexOf('['));
        javaClass = painlessTypesToJavaClasses.get(painlessType);

        // Fix: an unknown element type previously fell through to javaClass.getName() below and
        // surfaced as an opaque NullPointerException; fail with the standard message instead.
        if (javaClass == null) {
            throw new IllegalArgumentException("invalid painless type [" + painlessType + "].");
        }

        // Build a JVM-internal array descriptor, e.g. "[[J" for long[][].
        char braces[] = new char[arrayDimensions];
        Arrays.fill(braces, '[');
        String descriptor = new String(braces);

        if (javaClass == boolean.class) {
            descriptor += "Z";
        } else if (javaClass == byte.class) {
            descriptor += "B";
        } else if (javaClass == short.class) {
            descriptor += "S";
        } else if (javaClass == char.class) {
            descriptor += "C";
        } else if (javaClass == int.class) {
            descriptor += "I";
        } else if (javaClass == long.class) {
            descriptor += "J";
        } else if (javaClass == float.class) {
            descriptor += "F";
        } else if (javaClass == double.class) {
            descriptor += "D";
        } else {
            descriptor += "L" + javaClass.getName() + ";";
        }

        try {
            return Class.forName(descriptor);
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe);
        }
    }

    throw new IllegalArgumentException("invalid painless type [" + painlessType + "]");
}
/**
 * Finalizes the builder, producing an immutable {@link PainlessLookup} view over the
 * accumulated type and struct maps.
 */
public PainlessLookup build() {
return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs);
}
}

View File

@ -0,0 +1,28 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.lookup;
/** Marker class for def type to be used during type analysis. */
public final class def {
private def() {
}
}

View File

@ -23,7 +23,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;

View File

@ -21,7 +21,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -21,7 +21,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;

View File

@ -21,9 +21,6 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Constant;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;
@ -32,6 +29,9 @@ import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.ScriptClassInfo;
import org.elasticsearch.painless.SimpleChecksAdapter;
import org.elasticsearch.painless.WriterConstants;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.node.SFunction.FunctionReserved;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
@ -130,7 +130,6 @@ public final class SSource extends AStatement {
private final ScriptClassInfo scriptClassInfo;
private final CompilerSettings settings;
private final String name;
private final String source;
private final Printer debugStream;
private final MainMethodReserved reserved;
private final List<SFunction> functions;
@ -141,14 +140,12 @@ public final class SSource extends AStatement {
private final List<org.objectweb.asm.commons.Method> getMethods;
private byte[] bytes;
public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, String source, Printer debugStream,
MainMethodReserved reserved, Location location,
List<SFunction> functions, Globals globals, List<AStatement> statements) {
public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, Printer debugStream,
MainMethodReserved reserved, Location location, List<SFunction> functions, Globals globals, List<AStatement> statements) {
super(location);
this.scriptClassInfo = Objects.requireNonNull(scriptClassInfo);
this.settings = Objects.requireNonNull(settings);
this.name = Objects.requireNonNull(name);
this.source = Objects.requireNonNull(source);
this.debugStream = debugStream;
this.reserved = Objects.requireNonNull(reserved);
// process any synthetic functions generated by walker (because right now, thats still easy)

View File

@ -25,7 +25,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.lookup.PainlessLookup.def;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.Variable;

View File

@ -24,6 +24,7 @@ import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import static java.util.Collections.emptyMap;
@ -37,7 +38,7 @@ import static org.hamcrest.Matchers.startsWith;
*/
public class BaseClassTests extends ScriptTestCase {
private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
public abstract static class Gets {

View File

@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ScriptException;
@ -36,7 +37,7 @@ import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
public class DebugTests extends ScriptTestCase {
private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
public void testExplain() {
// Debug.explain can explain an object

View File

@ -19,7 +19,7 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.objectweb.asm.util.Textifier;
@ -40,7 +40,7 @@ final class Debugger {
PrintWriter outputWriter = new PrintWriter(output);
Textifier textifier = new Textifier();
try {
new Compiler(iface, new PainlessLookup(Whitelist.BASE_WHITELISTS))
new Compiler(iface, new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build())
.compile("<debugging>", source, settings, textifier);
} catch (RuntimeException e) {
textifier.print(outputWriter);

View File

@ -28,11 +28,12 @@ import java.util.Collections;
import java.util.HashMap;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.test.ESTestCase;
public class DefBootstrapTests extends ESTestCase {
private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
/** calls toString() on integers, twice */
public void testOneType() throws Throwable {

View File

@ -20,13 +20,16 @@
package org.elasticsearch.painless;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.spi.Whitelist;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Modifier;
@ -42,14 +45,13 @@ import java.util.function.Consumer;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.painless.spi.Whitelist.BASE_WHITELISTS;
/**
* Generates an API reference from the method and type whitelists in {@link PainlessLookup}.
*/
public class PainlessDocGenerator {
private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookup(BASE_WHITELISTS);
private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class);
private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.name);
private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.name);

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.painless.antlr.Walker;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
@ -91,7 +92,7 @@ public abstract class ScriptTestCase extends ESTestCase {
public Object exec(String script, Map<String, Object> vars, Map<String,String> compileParams, Scorer scorer, boolean picky) {
// test for ambiguity errors before running the actual script if picky is true
if (picky) {
PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class);
CompilerSettings pickySettings = new CompilerSettings();
pickySettings.setPicky(true);

View File

@ -23,6 +23,7 @@ import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessMethodKey;
import org.elasticsearch.painless.lookup.PainlessClass;
@ -48,7 +49,7 @@ import static org.elasticsearch.painless.node.SSource.MainMethodReserved;
* Tests {@link Object#toString} implementations on all extensions of {@link ANode}.
*/
public class NodeToStringTests extends ESTestCase {
private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
public void testEAssignment() {
assertToString(

View File

@ -112,7 +112,7 @@ public class Netty4TcpChannel implements TcpChannel {
}
}
public Channel getLowLevelChannel() {
public Channel getNettyChannel() {
return channel;
}

View File

@ -51,8 +51,8 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
private final NioHttpChannel nioHttpChannel;
private final NioHttpServerTransport transport;
HttpReadWriteHandler(NioHttpChannel nioHttpChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
NioCorsConfig corsConfig) {
public HttpReadWriteHandler(NioHttpChannel nioHttpChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
NioCorsConfig corsConfig) {
this.nioHttpChannel = nioHttpChannel;
this.transport = transport;

View File

@ -28,7 +28,7 @@ import java.nio.channels.SocketChannel;
public class NioHttpChannel extends NioSocketChannel implements HttpChannel {
NioHttpChannel(SocketChannel socketChannel) {
public NioHttpChannel(SocketChannel socketChannel) {
super(socketChannel);
}

View File

@ -23,12 +23,11 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.http.HttpServerChannel;
import org.elasticsearch.nio.NioServerSocketChannel;
import java.io.IOException;
import java.nio.channels.ServerSocketChannel;
public class NioHttpServerChannel extends NioServerSocketChannel implements HttpServerChannel {
NioHttpServerChannel(ServerSocketChannel serverSocketChannel) throws IOException {
public NioHttpServerChannel(ServerSocketChannel serverSocketChannel) {
super(serverSocketChannel);
}

View File

@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.http.AbstractHttpServerTransport;
import org.elasticsearch.http.HttpChannel;
import org.elasticsearch.http.HttpServerChannel;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.nio.cors.NioCorsConfig;
import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder;
import org.elasticsearch.nio.BytesChannelContext;
@ -87,21 +86,21 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
(s) -> Integer.toString(EsExecutors.numberOfProcessors(s) * 2),
(s) -> Setting.parseInt(s, 1, "http.nio.worker_count"), Setting.Property.NodeScope);
private final PageCacheRecycler pageCacheRecycler;
protected final PageCacheRecycler pageCacheRecycler;
protected final NioCorsConfig corsConfig;
private final boolean tcpNoDelay;
private final boolean tcpKeepAlive;
private final boolean reuseAddress;
private final int tcpSendBufferSize;
private final int tcpReceiveBufferSize;
protected final boolean tcpNoDelay;
protected final boolean tcpKeepAlive;
protected final boolean reuseAddress;
protected final int tcpSendBufferSize;
protected final int tcpReceiveBufferSize;
private NioGroup nioGroup;
private HttpChannelFactory channelFactory;
private final NioCorsConfig corsConfig;
private ChannelFactory<NioHttpServerChannel, NioHttpChannel> channelFactory;
public NioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays,
PageCacheRecycler pageCacheRecycler, ThreadPool threadPool, NamedXContentRegistry xContentRegistry,
HttpServerTransport.Dispatcher dispatcher) {
Dispatcher dispatcher) {
super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher);
this.pageCacheRecycler = pageCacheRecycler;
@ -136,7 +135,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
nioGroup = new NioGroup(daemonThreadFactory(this.settings, HTTP_SERVER_ACCEPTOR_THREAD_NAME_PREFIX), acceptorCount,
daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount,
(s) -> new EventHandler(this::onNonChannelException, s));
channelFactory = new HttpChannelFactory();
channelFactory = channelFactory();
bindServer();
success = true;
} catch (IOException e) {
@ -162,6 +161,10 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
return nioGroup.bindServerChannel(socketAddress, channelFactory);
}
protected ChannelFactory<NioHttpServerChannel, NioHttpChannel> channelFactory() {
return new HttpChannelFactory();
}
static NioCorsConfig buildCorsConfig(Settings settings) {
if (SETTING_CORS_ENABLED.get(settings) == false) {
return NioCorsConfigBuilder.forOrigins().disable().build();
@ -194,7 +197,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
.build();
}
private void acceptChannel(NioSocketChannel socketChannel) {
protected void acceptChannel(NioSocketChannel socketChannel) {
super.serverAcceptedChannel((HttpChannel) socketChannel);
}

View File

@ -13,9 +13,8 @@ setup:
---
"Nested inner hits":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
version: " - 6.1.99"
reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
- do:
index:
index: test
@ -46,8 +45,8 @@ setup:
"Nested doc version and seqIDs":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
version: " - 6.3.99"
reason: "object notation for docvalue_fields was introduced in 6.4"
- do:
index:

View File

@ -107,9 +107,6 @@ setup:
---
"field collapsing and inner_hits":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
- do:
search:
@ -149,9 +146,6 @@ setup:
---
"field collapsing, inner_hits and maxConcurrentGroupRequests":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
- do:
search:
@ -232,9 +226,6 @@ setup:
---
"no hits and inner_hits":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
- do:
search:
@ -249,9 +240,6 @@ setup:
---
"field collapsing and multiple inner_hits":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
- do:
search:
@ -304,9 +292,10 @@ setup:
---
"field collapsing, inner_hits and version":
- skip:
version: "all"
reason: "https://github.com/elastic/elasticsearch/issues/32055"
version: " - 6.1.0"
reason: "bug fixed in 6.1.1"
- do:
search:

View File

@ -199,7 +199,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
boolean hasChildren = in.readBoolean();
assert hasChildren == false;
}
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new);
}
}
@ -247,7 +247,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
}
}
out.writeOptionalWriteable(highlightBuilder);
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
out.writeOptionalWriteable(innerCollapseBuilder);
}
}

View File

@ -206,7 +206,7 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase {
Settings clusterSettings = Settings.builder()
.put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), Boolean.TRUE)
.put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "200b")
.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "300b")
.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "350b")
.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 2)
.build();

View File

@ -43,6 +43,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeValidationException;
@ -193,6 +194,9 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b")
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b")
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b")
// turning on the real memory circuit breaker leads to spurious test failures. As have no full control over heap usage, we
// turn it off for these tests.
.put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
.put(nodeSettings()) // allow test cases to provide their own settings or override these
.build();
Collection<Class<? extends Plugin>> plugins = getPlugins();

View File

@ -263,7 +263,8 @@ This is an example of the output:
"type": "index"
}
]
}
},
"user": "test_admin" <4>
}
}
--------------------------------------------------
@ -281,6 +282,7 @@ This is an example of the output:
<1> The id of the watch record as it would be stored in the `.watcher-history` index.
<2> The watch record document as it would be stored in the `.watcher-history` index.
<3> The watch execution results.
<4> The user used to execute the watch.
You can set a different execution mode for every action by associating the mode
name with the action id:

View File

@ -88,13 +88,17 @@ public class Authentication {
throws IOException, IllegalArgumentException {
assert ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY) == null;
Authentication authentication = decode(header);
ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
return authentication;
}
public static Authentication decode(String header) throws IOException {
byte[] bytes = Base64.getDecoder().decode(header);
StreamInput input = StreamInput.wrap(bytes);
Version version = Version.readVersion(input);
input.setVersion(version);
Authentication authentication = new Authentication(input);
ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
return authentication;
return new Authentication(input);
}
/**

View File

@ -8,6 +8,8 @@ package org.elasticsearch.xpack.core.watcher.execution;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult;
import org.elasticsearch.xpack.core.watcher.condition.Condition;
import org.elasticsearch.xpack.core.watcher.history.WatchRecord;
@ -18,6 +20,7 @@ import org.elasticsearch.xpack.core.watcher.watch.Payload;
import org.elasticsearch.xpack.core.watcher.watch.Watch;
import org.joda.time.DateTime;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -43,6 +46,7 @@ public abstract class WatchExecutionContext {
private Transform.Result transformResult;
private ConcurrentMap<String, ActionWrapperResult> actionsResults = ConcurrentCollections.newConcurrentMap();
private String nodeId;
private String user;
public WatchExecutionContext(String watchId, DateTime executionTime, TriggerEvent triggerEvent, TimeValue defaultThrottlePeriod) {
this.id = new Wid(watchId, executionTime);
@ -85,6 +89,7 @@ public abstract class WatchExecutionContext {
public final void ensureWatchExists(CheckedSupplier<Watch, Exception> supplier) throws Exception {
if (watch == null) {
watch = supplier.get();
user = WatchExecutionContext.getUsernameFromWatch(watch);
}
}
@ -137,6 +142,11 @@ public abstract class WatchExecutionContext {
return nodeId;
}
/**
* @return The user that executes the watch, which will be stored in the watch history
*/
public String getUser() { return user; }
public void start() {
assert phase == ExecutionPhase.AWAITS_EXECUTION;
relativeStartTime = System.nanoTime();
@ -243,4 +253,19 @@ public abstract class WatchExecutionContext {
public WatchExecutionSnapshot createSnapshot(Thread executionThread) {
return new WatchExecutionSnapshot(this, executionThread.getStackTrace());
}
/**
* Given a watch, this extracts and decodes the relevant auth header and returns the principal of the user that is
* executing the watch.
*/
public static String getUsernameFromWatch(Watch watch) throws IOException {
if (watch != null && watch.status() != null && watch.status().getHeaders() != null) {
String header = watch.status().getHeaders().get(AuthenticationField.AUTHENTICATION_KEY);
if (header != null) {
Authentication auth = Authentication.decode(header);
return auth.getUser().principal();
}
}
return null;
}
}

View File

@ -43,12 +43,14 @@ public abstract class WatchRecord implements ToXContentObject {
private static final ParseField METADATA = new ParseField("metadata");
private static final ParseField EXECUTION_RESULT = new ParseField("result");
private static final ParseField EXCEPTION = new ParseField("exception");
private static final ParseField USER = new ParseField("user");
protected final Wid id;
protected final Watch watch;
private final String nodeId;
protected final TriggerEvent triggerEvent;
protected final ExecutionState state;
private final String user;
// only emitted to xcontent in "debug" mode
protected final Map<String, Object> vars;
@ -60,7 +62,7 @@ public abstract class WatchRecord implements ToXContentObject {
private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, Map<String, Object> vars, ExecutableInput input,
ExecutableCondition condition, Map<String, Object> metadata, Watch watch, WatchExecutionResult executionResult,
String nodeId) {
String nodeId, String user) {
this.id = id;
this.triggerEvent = triggerEvent;
this.state = state;
@ -71,15 +73,16 @@ public abstract class WatchRecord implements ToXContentObject {
this.executionResult = executionResult;
this.watch = watch;
this.nodeId = nodeId;
this.user = user;
}
private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, String nodeId) {
this(id, triggerEvent, state, Collections.emptyMap(), null, null, null, null, null, nodeId);
this(id, triggerEvent, state, Collections.emptyMap(), null, null, null, null, null, nodeId, null);
}
private WatchRecord(WatchRecord record, ExecutionState state) {
this(record.id, record.triggerEvent, state, record.vars, record.input, record.condition, record.metadata, record.watch,
record.executionResult, record.nodeId);
record.executionResult, record.nodeId, record.user);
}
private WatchRecord(WatchExecutionContext context, ExecutionState state) {
@ -88,12 +91,13 @@ public abstract class WatchRecord implements ToXContentObject {
context.watch() != null ? context.watch().condition() : null,
context.watch() != null ? context.watch().metadata() : null,
context.watch(),
null, context.getNodeId());
null, context.getNodeId(), context.getUser());
}
private WatchRecord(WatchExecutionContext context, WatchExecutionResult executionResult) {
this(context.id(), context.triggerEvent(), getState(executionResult), context.vars(), context.watch().input(),
context.watch().condition(), context.watch().metadata(), context.watch(), executionResult, context.getNodeId());
context.watch().condition(), context.watch().metadata(), context.watch(), executionResult, context.getNodeId(),
context.getUser());
}
public static ExecutionState getState(WatchExecutionResult executionResult) {
@ -152,6 +156,9 @@ public abstract class WatchRecord implements ToXContentObject {
builder.field(NODE.getPreferredName(), nodeId);
builder.field(STATE.getPreferredName(), state.id());
if (user != null) {
builder.field(USER.getPreferredName(), user);
}
if (watch != null && watch.status() != null) {
builder.field(STATUS.getPreferredName(), watch.status(), params);
}

View File

@ -13,8 +13,9 @@ public final class WatcherIndexTemplateRegistryField {
// version 6: upgrade to ES 6, removal of _status field
// version 7: add full exception stack traces for better debugging
// version 8: fix slack attachment property not to be dynamic, causing field type issues
// version 9: add a user field defining which user executed the watch
// Note: if you change this, also inform the kibana team around the watcher-ui
public static final String INDEX_TEMPLATE_VERSION = "8";
public static final String INDEX_TEMPLATE_VERSION = "9";
public static final String HISTORY_TEMPLATE_NAME = ".watch-history-" + INDEX_TEMPLATE_VERSION;
public static final String TRIGGERED_TEMPLATE_NAME = ".triggered_watches";
public static final String WATCHES_TEMPLATE_NAME = ".watches";

View File

@ -120,6 +120,9 @@
"messages": {
"type": "text"
},
"user": {
"type": "text"
},
"exception" : {
"type" : "object",
"enabled" : false

View File

@ -94,7 +94,7 @@ public class ConfigTestHelpers {
if (ESTestCase.randomBoolean()) {
dateHistoBuilder.setDelay(new DateHistogramInterval(randomPositiveTimeValue()));
}
dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(1, 10 ));
dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(5, 10));
return dateHistoBuilder;
}
@ -112,8 +112,8 @@ public class ConfigTestHelpers {
}
public static List<String> getFields() {
return IntStream.range(0, ESTestCase.randomIntBetween(1,10))
.mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(1,10))
return IntStream.range(0, ESTestCase.randomIntBetween(1, 10))
.mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10))
.collect(Collectors.toList());
}

View File

@ -1,6 +1,3 @@
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.BuildPlugin
evaluationDependsOn(xpackModule('core'))
apply plugin: 'elasticsearch.esplugin'
@ -23,33 +20,8 @@ dependencies {
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
}
dependencyLicenses {
ignoreSha 'x-pack-core'
}
run {
plugin xpackModule('core')
}
integTest.enabled = false
// Instead we create a separate task to run the
// tests based on ESIntegTestCase
task internalClusterTest(type: RandomizedTestingTask,
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Multi-node tests',
dependsOn: test.dependsOn) {
configure(BuildPlugin.commonTestConfig(project))
classpath = project.test.classpath
testClassesDirs = project.test.testClassesDirs
include '**/*IT.class'
systemProperty 'es.set.netty.runtime.available.processors', 'false'
}
check.dependsOn internalClusterTest
internalClusterTest.mustRunAfter test
// also add an "alias" task to make typing on the command line easier task icTest {
task icTest {
dependsOn internalClusterTest
}

View File

@ -1,498 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.rollup;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.license.LicenseService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Netty4Plugin;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction;
import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction;
import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction;
import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction;
import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
import org.elasticsearch.xpack.core.rollup.job.IndexerState;
import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.hamcrest.core.IsEqual.equalTo;
@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
public class RollupIT extends ESIntegTestCase {
private String taskId = "test-bigID";
@Override
protected boolean ignoreExternalCluster() {
return true;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(LocalStateRollup.class, CommonAnalysisPlugin.class, Netty4Plugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
Settings.Builder builder = Settings.builder();
builder.put(XPackSettings.ROLLUP_ENABLED.getKey(), true);
builder.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
return builder.build();
}
@Override
protected Settings externalClusterClientSettings() {
return nodeSettings(0);
}
@Override
protected Settings transportClientSettings() {
return Settings.builder().put(super.transportClientSettings())
.put(XPackSettings.ROLLUP_ENABLED.getKey(), true)
.put(XPackSettings.SECURITY_ENABLED.getKey(), false)
.build();
}
@Before
public void createIndex() {
client().admin().indices().prepareCreate("test-1").addMapping("doc", "{\"doc\": {\"properties\": {" +
"\"date_histo\": {\"type\": \"date\"}, " +
"\"histo\": {\"type\": \"integer\"}, " +
"\"terms\": {\"type\": \"keyword\"}}}}", XContentType.JSON).get();
client().admin().cluster().prepareHealth("test-1").setWaitForYellowStatus().get();
BulkRequestBuilder bulk = client().prepareBulk();
Map<String, Object> source = new HashMap<>(3);
for (int i = 0; i < 20; i++) {
for (int j = 0; j < 20; j++) {
for (int k = 0; k < 20; k++) {
source.put("date_histo", new DateTime().minusDays(i).toString());
source.put("histo", Integer.toString(j * 100));
source.put("terms", Integer.toString(k * 100));
source.put("foo", k);
bulk.add(new IndexRequest("test-1", "doc").source(source));
source.clear();
}
}
}
bulk.get();
client().admin().indices().prepareRefresh("test-1").get();
}
public void testGetJob() throws ExecutionException, InterruptedException {
MetricConfig metricConfig = new MetricConfig.Builder()
.setField("foo")
.setMetrics(Arrays.asList("sum", "min", "max", "avg"))
.build();
DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
datehistoGroupConfig.setField("date_histo");
datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
GroupConfig.Builder groupConfig = new GroupConfig.Builder();
groupConfig.setDateHisto(datehistoGroupConfig.build());
RollupJobConfig.Builder config = new RollupJobConfig.Builder();
config.setIndexPattern("test-1");
config.setRollupIndex("rolled");
config.setId("testGet");
config.setGroupConfig(groupConfig.build());
config.setMetricsConfig(Collections.singletonList(metricConfig));
config.setCron("* * * * * ? *");
config.setPageSize(10);
PutRollupJobAction.Request request = new PutRollupJobAction.Request();
request.setConfig(config.build());
client().execute(PutRollupJobAction.INSTANCE, request).get();
GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testGet");
GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
assertThat(response.getJobs().size(), equalTo(1));
assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testGet"));
}
public void testIndexPattern() throws Exception {
MetricConfig metricConfig = new MetricConfig.Builder()
.setField("foo")
.setMetrics(Arrays.asList("sum", "min", "max", "avg"))
.build();
DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
datehistoGroupConfig.setField("date_histo");
datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
GroupConfig.Builder groupConfig = new GroupConfig.Builder();
groupConfig.setDateHisto(datehistoGroupConfig.build());
RollupJobConfig.Builder config = new RollupJobConfig.Builder();
config.setIndexPattern("test-*");
config.setId("testIndexPattern");
config.setRollupIndex("rolled");
config.setGroupConfig(groupConfig.build());
config.setMetricsConfig(Collections.singletonList(metricConfig));
config.setCron("* * * * * ? *");
config.setPageSize(10);
PutRollupJobAction.Request request = new PutRollupJobAction.Request();
request.setConfig(config.build());
client().execute(PutRollupJobAction.INSTANCE, request).get();
StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("testIndexPattern");
StartRollupJobAction.Response startResponse = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
Assert.assertThat(startResponse.isStarted(), equalTo(true));
// Make sure it started
ESTestCase.assertBusy(() -> {
RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern");
if (rollupJobStatus == null) {
fail("null");
}
IndexerState state = rollupJobStatus.getIndexerState();
assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING));
}, 60, TimeUnit.SECONDS);
// And wait for it to finish
ESTestCase.assertBusy(() -> {
RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern");
if (rollupJobStatus == null) {
fail("null");
}
IndexerState state = rollupJobStatus.getIndexerState();
assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null);
}, 60, TimeUnit.SECONDS);
GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testIndexPattern");
GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
Assert.assertThat(response.getJobs().size(), equalTo(1));
Assert.assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testIndexPattern"));
GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices("rolled").get();
Assert.assertThat(getIndexResponse.indices().length, Matchers.greaterThan(0));
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30290")
public void testTwoJobsStartStopDeleteOne() throws Exception {
    // Creates two rollup jobs ("job1", "job2") over the same source pattern and
    // rollup index, starts only job1, and verifies that:
    //   1. job1 transitions to STARTED/INDEXING while job2 stays STOPPED,
    //   2. deleting job1 removes its task and its GetJobs entry,
    //   3. job2 is left intact and still STOPPED.
    MetricConfig metricConfig = new MetricConfig.Builder()
        .setField("foo")
        .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
        .build();
    DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
    datehistoGroupConfig.setField("date_histo");
    datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
    GroupConfig.Builder groupConfig = new GroupConfig.Builder();
    groupConfig.setDateHisto(datehistoGroupConfig.build());

    RollupJobConfig.Builder config = new RollupJobConfig.Builder();
    config.setIndexPattern("test-1");
    config.setRollupIndex("rolled");
    config.setId("job1");
    config.setGroupConfig(groupConfig.build());
    config.setMetricsConfig(Collections.singletonList(metricConfig));
    config.setCron("* * * * * ? *");
    config.setPageSize(10);

    PutRollupJobAction.Request request = new PutRollupJobAction.Request();
    request.setConfig(config.build());
    client().execute(PutRollupJobAction.INSTANCE, request).get();

    RollupJobConfig.Builder config2 = new RollupJobConfig.Builder();
    config2.setIndexPattern("test-1");
    config2.setRollupIndex("rolled");
    config2.setId("job2");
    config2.setGroupConfig(groupConfig.build());
    config2.setMetricsConfig(Collections.singletonList(metricConfig));
    config2.setCron("* * * * * ? *");
    config2.setPageSize(10);

    PutRollupJobAction.Request request2 = new PutRollupJobAction.Request();
    request2.setConfig(config2.build());
    client().execute(PutRollupJobAction.INSTANCE, request2).get();

    StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("job1");
    StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
    Assert.assertThat(response.isStarted(), equalTo(true));

    // Make sure job1 started
    ESTestCase.assertBusy(() -> {
        RollupJobStatus rollupJobStatus = getRollupJobStatus("job1");
        if (rollupJobStatus == null) {
            fail("rollup job status for [job1] should not be null");
        }
        IndexerState state = rollupJobStatus.getIndexerState();
        assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING));
    }, 60, TimeUnit.SECONDS);

    // ... but not the other task. getRollupJobStatus() can return null while the
    // task is still being set up, so guard against it explicitly (consistent with
    // the other assertBusy blocks) instead of risking an NPE inside assertBusy.
    ESTestCase.assertBusy(() -> {
        RollupJobStatus rollupJobStatus = getRollupJobStatus("job2");
        if (rollupJobStatus == null) {
            fail("rollup job status for [job2] should not be null");
        }
        IndexerState state = rollupJobStatus.getIndexerState();
        assertTrue(state.equals(IndexerState.STOPPED));
    }, 60, TimeUnit.SECONDS);

    // Delete the first job's task
    DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request("job1");
    DeleteRollupJobAction.Response deleteResponse = client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get();
    Assert.assertTrue(deleteResponse.isAcknowledged());

    // Make sure the first job's task is gone
    ESTestCase.assertBusy(() -> {
        RollupJobStatus rollupJobStatus = getRollupJobStatus("job1");
        assertTrue(rollupJobStatus == null);
    }, 60, TimeUnit.SECONDS);

    // And that we don't see it in the GetJobs API
    GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("job1");
    GetRollupJobsAction.Response getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
    Assert.assertThat(getResponse.getJobs().size(), equalTo(0));

    // But make sure the other job is still there
    getRequest = new GetRollupJobsAction.Request("job2");
    getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
    Assert.assertThat(getResponse.getJobs().size(), equalTo(1));
    Assert.assertThat(getResponse.getJobs().get(0).getJob().getId(), equalTo("job2"));

    // ... and still STOPPED (same null guard as above)
    ESTestCase.assertBusy(() -> {
        RollupJobStatus rollupJobStatus = getRollupJobStatus("job2");
        if (rollupJobStatus == null) {
            fail("rollup job status for [job2] should not be null");
        }
        IndexerState state = rollupJobStatus.getIndexerState();
        assertTrue(state.equals(IndexerState.STOPPED));
    }, 60, TimeUnit.SECONDS);
}
/**
 * End-to-end rollup test: indexes 90 days x 100 random documents into "test-big"
 * and an identical copy into "test-verify", rolls "test-big" up into "rolled"
 * with a 1d date histogram plus sum/min/max/avg metrics, and then compares a
 * date_histogram aggregation run through the RollupSearch API against the same
 * aggregation run against the untouched verification index.
 */
public void testBig() throws Exception {
    // Source index for the rollup job.
    client().admin().indices().prepareCreate("test-big")
        .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " +
            "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON)
        .setSettings(Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
    client().admin().cluster().prepareHealth("test-big").setWaitForYellowStatus().get();

    // Verification index: receives the exact same documents but is never rolled up,
    // so a plain search over it gives the expected aggregation results.
    client().admin().indices().prepareCreate("test-verify")
        .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " +
            "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON)
        .setSettings(Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
    client().admin().cluster().prepareHealth("test-verify").setWaitForYellowStatus().get();

    BulkRequestBuilder bulk = client().prepareBulk();
    Map<String, Object> source = new HashMap<>(3);
    int numDays = 90;
    int numDocsPerDay = 100;
    // One bulk request per day; every document is written to both indices.
    for (int i = 0; i < numDays; i++) {
        DateTime ts = new DateTime().minusDays(i);
        for (int j = 0; j < numDocsPerDay; j++) {
            int value = ESTestCase.randomIntBetween(0,100);
            source.put("timestamp", ts.toString());
            source.put("thefield", value);
            bulk.add(new IndexRequest("test-big", "test-big").source(source));
            bulk.add(new IndexRequest("test-verify", "test-big").source(source));
            source.clear();
        }
        // IMMEDIATE refresh so each day's docs are searchable before the next bulk.
        bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        bulk.get();
        bulk = client().prepareBulk();
        logger.info("Day: [" + i + "]: " + ts.toString() + " [" + ts.getMillis() + "]" );
    }

    client().admin().indices().prepareRefresh("test-big").get();
    client().admin().indices().prepareRefresh("test-verify").get();

    // Rollup job: 1d date histogram on "timestamp" plus sum/min/max/avg of "thefield".
    MetricConfig metricConfig = new MetricConfig.Builder()
        .setField("thefield")
        .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
        .build();
    DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
    datehistoGroupConfig.setField("timestamp");
    datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
    GroupConfig.Builder groupConfig = new GroupConfig.Builder();
    groupConfig.setDateHisto(datehistoGroupConfig.build());

    RollupJobConfig.Builder config = new RollupJobConfig.Builder();
    config.setIndexPattern("test-big");
    config.setRollupIndex("rolled");
    // taskId is a field declared elsewhere in this test class (not visible here).
    config.setId(taskId);
    config.setGroupConfig(groupConfig.build());
    config.setMetricsConfig(Collections.singletonList(metricConfig));
    config.setCron("* * * * * ? *");
    config.setPageSize(1000);

    PutRollupJobAction.Request request = new PutRollupJobAction.Request();
    request.setConfig(config.build());
    client().execute(PutRollupJobAction.INSTANCE, request).get();

    StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request(taskId);
    StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
    Assert.assertThat(response.isStarted(), equalTo(true));

    // Wait for the job to reach STARTED with a non-null position, i.e. it has
    // made indexing progress.
    ESTestCase.assertBusy(() -> {
        RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId);
        if (rollupJobStatus == null) {
            fail("null");
        }
        IndexerState state = rollupJobStatus.getIndexerState();
        logger.error("state: [" + state + "]");
        assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null);
    }, 60, TimeUnit.SECONDS);

    RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId);
    if (rollupJobStatus == null) {
        Assert.fail("rollup job status should not be null");
    }

    client().admin().indices().prepareRefresh("rolled").get();

    SearchResponse count = client().prepareSearch("rolled").setSize(10).get();
    // total document is numDays minus 1 because we don't build rollup for
    // buckets that are not full (bucket for the current day).
    Assert.assertThat(count.getHits().totalHits, equalTo(Long.valueOf(numDays-1)));

    // Randomly delete the source index to prove the rollup search does not need it.
    if (ESTestCase.randomBoolean()) {
        client().admin().indices().prepareDelete("test-big").get();
        client().admin().indices().prepareRefresh().get();
    }

    // Execute the rollup search (1d interval expressed in milliseconds).
    SearchRequest rollupRequest = new SearchRequest("rolled")
        .source(new SearchSourceBuilder()
            .aggregation(dateHistogram("timestamp")
                .interval(1000*86400)
                .field("timestamp"))
            .size(0));
    SearchResponse searchResponse = client().execute(RollupSearchAction.INSTANCE, rollupRequest).get();
    Assert.assertNotNull(searchResponse);

    // And a regular search against the verification index
    SearchRequest verifyRequest = new SearchRequest("test-verify")
        .source(new SearchSourceBuilder()
            .aggregation(dateHistogram("timestamp")
                .interval(1000*86400)
                .field("timestamp"))
            .size(0));
    SearchResponse verifyResponse = client().execute(SearchAction.INSTANCE, verifyRequest).get();

    // Every aggregation from the verification search must have a matching
    // aggregation (same name, same type, same buckets) in the rollup response.
    Map<String, Aggregation> rollupAggs = searchResponse.getAggregations().asMap();
    for (Aggregation agg : verifyResponse.getAggregations().asList()) {
        Aggregation rollupAgg = rollupAggs.get(agg.getName());
        Assert.assertNotNull(rollupAgg);
        Assert.assertThat(rollupAgg.getType(), equalTo(agg.getType()));
        verifyAgg((InternalDateHistogram)agg, (InternalDateHistogram)rollupAgg);
    }

    // And a quick sanity check for doc type
    SearchRequest rollupRawRequest = new SearchRequest("rolled")
        .source(new SearchSourceBuilder().query(new MatchAllQueryBuilder())
            .size(1));
    SearchResponse searchRawResponse = client().execute(SearchAction.INSTANCE, rollupRawRequest).get();
    Assert.assertNotNull(searchRawResponse);
    assertThat(searchRawResponse.getHits().getAt(0).getType(), equalTo("_doc"));
}
/**
 * Asserts that a rollup date-histogram matches the verification one bucket by
 * bucket: same doc count, same key (epoch millis), and equal sub-aggregations.
 */
private void verifyAgg(InternalDateHistogram verify, InternalDateHistogram rollup) {
    // Compare sizes up front: the original loop was bounded by the rollup bucket
    // count, so extra rollup buckets threw IndexOutOfBoundsException while extra
    // verify buckets were silently never checked.
    Assert.assertThat(rollup.getBuckets().size(), equalTo(verify.getBuckets().size()));
    for (int i = 0; i < rollup.getBuckets().size(); i++) {
        InternalDateHistogram.Bucket verifyBucket = verify.getBuckets().get(i);
        InternalDateHistogram.Bucket rollupBucket = rollup.getBuckets().get(i);
        Assert.assertThat(rollupBucket.getDocCount(), equalTo(verifyBucket.getDocCount()));
        // Compare keys as epoch millis to sidestep time-zone representation differences.
        Assert.assertThat(((DateTime)rollupBucket.getKey()).getMillis(), equalTo(((DateTime)verifyBucket.getKey()).getMillis()));
        Assert.assertTrue(rollupBucket.getAggregations().equals(verifyBucket.getAggregations()));
    }
}
/**
 * Fetches the status of the rollup job with the given id via the GetRollupJobs
 * API, or returns {@code null} when no such job exists.
 */
private RollupJobStatus getRollupJobStatus(final String taskId) {
    final GetRollupJobsAction.Request request = new GetRollupJobsAction.Request(taskId);
    final GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, request).actionGet();
    // Guard clause: no jobs (or a null list) means the job/task does not exist.
    if (response.getJobs() == null || response.getJobs().isEmpty()) {
        return null;
    }
    assertThat("Expect 1 rollup job with id " + taskId, response.getJobs().size(), equalTo(1));
    return response.getJobs().iterator().next().getStatus();
}
/**
 * Test teardown: lists every rollup job, stops each one (best effort), then
 * deletes it so that no job state leaks into the next test.
 */
@After
public void cleanup() throws ExecutionException, InterruptedException {
    final GetRollupJobsAction.Request allJobsRequest = new GetRollupJobsAction.Request("_all");
    final GetRollupJobsAction.Response allJobsResponse =
        client().execute(GetRollupJobsAction.INSTANCE, allJobsRequest).get();
    for (GetRollupJobsAction.JobWrapper wrapper : allJobsResponse.getJobs()) {
        final String jobId = wrapper.getJob().getId();
        try {
            client().execute(StopRollupJobAction.INSTANCE, new StopRollupJobAction.Request(jobId)).get();
        } catch (ElasticsearchException ignored) {
            // Best effort: the job may already be stopped; deletion below is what matters.
        }
        client().execute(DeleteRollupJobAction.INSTANCE, new DeleteRollupJobAction.Request(jobId)).get();
    }
}
}

View File

@ -200,11 +200,13 @@ import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction
import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction;
import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import org.elasticsearch.xpack.security.transport.SecurityHttpSettings;
import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServerTransport;
import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport;
import org.elasticsearch.xpack.core.template.TemplateUtils;
import org.elasticsearch.xpack.security.transport.nio.SecurityNioHttpServerTransport;
import org.elasticsearch.xpack.security.transport.nio.SecurityNioTransport;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -511,21 +513,22 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
if (NetworkModule.HTTP_TYPE_SETTING.exists(settings)) {
final String httpType = NetworkModule.HTTP_TYPE_SETTING.get(settings);
if (httpType.equals(SecurityField.NAME4)) {
SecurityNetty4HttpServerTransport.overrideSettings(builder, settings);
if (httpType.equals(SecurityField.NAME4) || httpType.equals(SecurityField.NIO)) {
SecurityHttpSettings.overrideSettings(builder, settings);
} else {
final String message = String.format(
Locale.ROOT,
"http type setting [%s] must be [%s] but is [%s]",
"http type setting [%s] must be [%s] or [%s] but is [%s]",
NetworkModule.HTTP_TYPE_KEY,
SecurityField.NAME4,
SecurityField.NIO,
httpType);
throw new IllegalArgumentException(message);
}
} else {
// default to security4
builder.put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4);
SecurityNetty4HttpServerTransport.overrideSettings(builder, settings);
SecurityHttpSettings.overrideSettings(builder, settings);
}
builder.put(SecuritySettings.addUserSettings(settings));
return builder.build();
@ -869,8 +872,14 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
if (enabled == false) { // don't register anything if we are not enabled
return Collections.emptyMap();
}
return Collections.singletonMap(SecurityField.NAME4, () -> new SecurityNetty4HttpServerTransport(settings,
networkService, bigArrays, ipFilter.get(), getSslService(), threadPool, xContentRegistry, dispatcher));
Map<String, Supplier<HttpServerTransport>> httpTransports = new HashMap<>();
httpTransports.put(SecurityField.NAME4, () -> new SecurityNetty4HttpServerTransport(settings, networkService, bigArrays,
ipFilter.get(), getSslService(), threadPool, xContentRegistry, dispatcher));
httpTransports.put(SecurityField.NIO, () -> new SecurityNioHttpServerTransport(settings, networkService, bigArrays,
pageCacheRecycler, threadPool, xContentRegistry, dispatcher, ipFilter.get(), getSslService()));
return httpTransports;
}
@Override

View File

@ -5,8 +5,6 @@
*/
package org.elasticsearch.xpack.security.rest;
import io.netty.channel.Channel;
import io.netty.handler.ssl.SslHandler;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
@ -15,7 +13,6 @@ import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.HttpChannel;
import org.elasticsearch.http.netty4.Netty4HttpChannel;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
@ -24,7 +21,7 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestRequest.Method;
import org.elasticsearch.xpack.core.security.rest.RestRequestFilter;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.transport.ServerTransportFilter;
import org.elasticsearch.xpack.security.transport.SSLEngineUtils;
import java.io.IOException;
@ -53,10 +50,7 @@ public class SecurityRestFilter implements RestHandler {
// CORS - allow for preflight unauthenticated OPTIONS request
if (extractClientCertificate) {
HttpChannel httpChannel = request.getHttpChannel();
Channel nettyChannel = ((Netty4HttpChannel) httpChannel).getNettyChannel();
SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
assert handler != null;
ServerTransportFilter.extractClientCertificates(logger, threadContext, handler.engine(), nettyChannel);
SSLEngineUtils.extractClientCertificates(logger, threadContext, httpChannel);
}
service.authenticate(maybeWrapRestRequest(request), ActionListener.wrap(
authentication -> {

View File

@ -0,0 +1,93 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport;

import io.netty.channel.Channel;
import io.netty.handler.ssl.SslHandler;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.HttpChannel;
import org.elasticsearch.http.netty4.Netty4HttpChannel;
import org.elasticsearch.http.nio.NioHttpChannel;
import org.elasticsearch.nio.SocketChannelContext;
import org.elasticsearch.transport.TcpChannel;
import org.elasticsearch.transport.netty4.Netty4TcpChannel;
import org.elasticsearch.transport.nio.NioTcpChannel;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
import org.elasticsearch.xpack.security.transport.nio.SSLChannelContext;

import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLPeerUnverifiedException;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;

/**
 * Utilities for retrieving the {@link SSLEngine} backing an HTTP or TCP channel
 * (both Netty4 and NIO implementations) and for copying the client's X.509
 * certificate chain into the {@link ThreadContext} for PKI authentication.
 */
public class SSLEngineUtils {

    // Static utility holder; not meant to be instantiated.
    private SSLEngineUtils() {}

    /**
     * Extracts the client certificate chain from the given HTTP channel's SSL
     * session and stores it in the thread context (see {@link #getSSLEngine(HttpChannel)}).
     */
    public static void extractClientCertificates(Logger logger, ThreadContext threadContext, HttpChannel httpChannel) {
        SSLEngine sslEngine = getSSLEngine(httpChannel);
        extract(logger, threadContext, sslEngine, httpChannel);
    }

    /**
     * Extracts the client certificate chain from the given TCP channel's SSL
     * session and stores it in the thread context (see {@link #getSSLEngine(TcpChannel)}).
     */
    public static void extractClientCertificates(Logger logger, ThreadContext threadContext, TcpChannel tcpChannel) {
        SSLEngine sslEngine = getSSLEngine(tcpChannel);
        extract(logger, threadContext, sslEngine, tcpChannel);
    }

    /**
     * Returns the {@link SSLEngine} for an HTTP channel. Supports the Netty4 channel
     * (engine taken from the pipeline's {@link SslHandler}) and the NIO channel
     * (engine taken from its {@link SSLChannelContext}); any other implementation
     * is a programming error and trips an {@link AssertionError}.
     */
    public static SSLEngine getSSLEngine(HttpChannel httpChannel) {
        if (httpChannel instanceof Netty4HttpChannel) {
            Channel nettyChannel = ((Netty4HttpChannel) httpChannel).getNettyChannel();
            SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
            assert handler != null : "Must have SslHandler";
            return handler.engine();
        } else if (httpChannel instanceof NioHttpChannel) {
            SocketChannelContext context = ((NioHttpChannel) httpChannel).getContext();
            assert context instanceof SSLChannelContext : "Must be SSLChannelContext.class, found:  " + context.getClass();
            return ((SSLChannelContext) context).getSSLEngine();
        } else {
            throw new AssertionError("Unknown channel class type: " + httpChannel.getClass());
        }
    }

    /**
     * Returns the {@link SSLEngine} for a TCP channel; same dispatch strategy as
     * {@link #getSSLEngine(HttpChannel)} but for the transport-layer channel types.
     */
    public static SSLEngine getSSLEngine(TcpChannel tcpChannel) {
        if (tcpChannel instanceof Netty4TcpChannel) {
            Channel nettyChannel = ((Netty4TcpChannel) tcpChannel).getNettyChannel();
            SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
            assert handler != null : "Must have SslHandler";
            return handler.engine();
        } else if (tcpChannel instanceof NioTcpChannel) {
            SocketChannelContext context = ((NioTcpChannel) tcpChannel).getContext();
            assert context instanceof SSLChannelContext : "Must be SSLChannelContext.class, found:  " + context.getClass();
            return ((SSLChannelContext) context).getSSLEngine();
        } else {
            throw new AssertionError("Unknown channel class type: " + tcpChannel.getClass());
        }
    }

    /**
     * Reads the peer certificate chain from the engine's SSL session and, when it
     * is an X.509 chain, publishes it under {@link PkiRealm#PKI_CERT_HEADER_NAME}
     * so the PKI realm can authenticate the client.
     */
    private static void extract(Logger logger, ThreadContext threadContext, SSLEngine sslEngine, Object channel) {
        try {
            Certificate[] certs = sslEngine.getSession().getPeerCertificates();
            if (certs instanceof X509Certificate[]) {
                threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, certs);
            }
        } catch (SSLPeerUnverifiedException e) {
            // this happens when client authentication is optional and the client does not provide credentials. If client
            // authentication was required then this connection should be closed before ever getting into this class
            assert sslEngine.getNeedClientAuth() == false;
            assert sslEngine.getWantClientAuth();
            if (logger.isTraceEnabled()) {
                logger.trace(
                    (Supplier<?>) () -> new ParameterizedMessage(
                        "SSL Peer did not present a certificate on channel [{}]", channel), e);
            } else if (logger.isDebugEnabled()) {
                logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
            }
        }
    }
}

View File

@ -0,0 +1,64 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.network.CloseableChannel;
import org.elasticsearch.http.HttpChannel;

import java.util.function.BiConsumer;

import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException;
import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isNotSslRecordException;
import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException;

/**
 * Shared HTTP-channel exception handler for the security transports: recognizes
 * well-known TLS failure modes (plaintext traffic on an HTTPS port, a client
 * closing mid-handshake, an untrusted server certificate), logs them at an
 * appropriate level, and closes the channel. Anything else is delegated to the
 * transport's own fallback handler.
 */
public final class SecurityHttpExceptionHandler implements BiConsumer<HttpChannel, Exception> {

    private final Lifecycle lifecycle;
    private final Logger logger;
    // Handler invoked for exceptions that are not TLS-specific.
    private final BiConsumer<HttpChannel, Exception> fallback;

    public SecurityHttpExceptionHandler(Logger logger, Lifecycle lifecycle, BiConsumer<HttpChannel, Exception> fallback) {
        this.lifecycle = lifecycle;
        this.logger = logger;
        this.fallback = fallback;
    }

    @Override
    public void accept(HttpChannel channel, Exception e) {
        // Ignore exceptions raised while the transport is shutting down (or not yet started).
        if (lifecycle.started() == false) {
            return;
        }

        if (isNotSslRecordException(e)) {
            if (logger.isTraceEnabled()) {
                logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}",
                    channel), e);
            } else {
                logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel);
            }
            CloseableChannel.closeChannel(channel);
        } else if (isCloseDuringHandshakeException(e)) {
            if (logger.isTraceEnabled()) {
                logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e);
            } else {
                logger.warn("connection {} closed during ssl handshake", channel);
            }
            CloseableChannel.closeChannel(channel);
        } else if (isReceivedCertificateUnknownException(e)) {
            if (logger.isTraceEnabled()) {
                logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}",
                    channel), e);
            } else {
                logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
            }
            CloseableChannel.closeChannel(channel);
        } else {
            fallback.accept(channel, e);
        }
    }
}

View File

@ -0,0 +1,22 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport;

import org.elasticsearch.common.settings.Settings;

import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;

/**
 * HTTP setting adjustments applied when security is enabled: when HTTPS is on
 * and the user has not explicitly configured HTTP compression, compression is
 * disabled by default.
 */
public final class SecurityHttpSettings {

    // Static utility holder; not meant to be instantiated.
    private SecurityHttpSettings() {}

    /**
     * Writes the overridden defaults into {@code settingsBuilder}. Only touches
     * {@code http.compression} when SSL is enabled and the setting is absent, so
     * an explicit user choice is always respected.
     */
    public static void overrideSettings(Settings.Builder settingsBuilder, Settings settings) {
        if (HTTP_SSL_ENABLED.get(settings) && SETTING_HTTP_COMPRESSION.exists(settings) == false) {
            settingsBuilder.put(SETTING_HTTP_COMPRESSION.getKey(), false);
        }
    }
}

View File

@ -5,11 +5,7 @@
*/
package org.elasticsearch.xpack.security.transport;
import io.netty.channel.Channel;
import io.netty.handler.ssl.SslHandler;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.IndicesRequest;
@ -20,11 +16,13 @@ import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.transport.TaskTransportChannel;
import org.elasticsearch.transport.TcpChannel;
import org.elasticsearch.transport.TcpTransportChannel;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.netty4.Netty4TcpChannel;
import org.elasticsearch.transport.nio.NioTcpChannel;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.user.KibanaUser;
@ -32,16 +30,10 @@ import org.elasticsearch.xpack.core.security.user.SystemUser;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.action.SecurityActionMapper;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
import org.elasticsearch.xpack.security.authz.AuthorizationService;
import org.elasticsearch.xpack.security.authz.AuthorizationUtils;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLPeerUnverifiedException;
import java.io.IOException;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError;
@ -115,13 +107,12 @@ public interface ServerTransportFilter {
unwrappedChannel = ((TaskTransportChannel) unwrappedChannel).getChannel();
}
if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel) &&
((TcpTransportChannel) unwrappedChannel).getChannel() instanceof Netty4TcpChannel) {
Channel channel = ((Netty4TcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel();
SslHandler sslHandler = channel.pipeline().get(SslHandler.class);
if (channel.isOpen()) {
assert sslHandler != null : "channel [" + channel + "] did not have a ssl handler. pipeline " + channel.pipeline();
extractClientCertificates(logger, threadContext, sslHandler.engine(), channel);
if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel)) {
TcpChannel tcpChannel = ((TcpTransportChannel) unwrappedChannel).getChannel();
if (tcpChannel instanceof Netty4TcpChannel || tcpChannel instanceof NioTcpChannel) {
if (tcpChannel.isOpen()) {
SSLEngineUtils.extractClientCertificates(logger, threadContext, tcpChannel);
}
}
}
@ -172,27 +163,6 @@ public interface ServerTransportFilter {
}
}
static void extractClientCertificates(Logger logger, ThreadContext threadContext, SSLEngine sslEngine, Channel channel) {
try {
Certificate[] certs = sslEngine.getSession().getPeerCertificates();
if (certs instanceof X509Certificate[]) {
threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, certs);
}
} catch (SSLPeerUnverifiedException e) {
// this happens when client authentication is optional and the client does not provide credentials. If client
// authentication was required then this connection should be closed before ever getting into this class
assert sslEngine.getNeedClientAuth() == false;
assert sslEngine.getWantClientAuth();
if (logger.isTraceEnabled()) {
logger.trace(
(Supplier<?>) () -> new ParameterizedMessage(
"SSL Peer did not present a certificate on channel [{}]", channel), e);
} else if (logger.isDebugEnabled()) {
logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
}
}
}
/**
* A server transport filter rejects internal calls, which should be used on connections
* where only clients connect to. This ensures that no client can send any internal actions

View File

@ -8,8 +8,6 @@ package org.elasticsearch.xpack.security.transport.netty4;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.handler.ssl.SslHandler;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.network.CloseableChannel;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
@ -19,18 +17,16 @@ import org.elasticsearch.http.netty4.Netty4HttpServerTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import javax.net.ssl.SSLEngine;
import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException;
import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isNotSslRecordException;
import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException;
public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport {
private final SecurityHttpExceptionHandler securityExceptionHandler;
private final IPFilter ipFilter;
private final SSLService sslService;
private final SSLConfiguration sslConfiguration;
@ -39,6 +35,7 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
SSLService sslService, ThreadPool threadPool, NamedXContentRegistry xContentRegistry,
Dispatcher dispatcher) {
super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher);
this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e));
this.ipFilter = ipFilter;
final boolean ssl = HTTP_SSL_ENABLED.get(settings);
this.sslService = sslService;
@ -51,41 +48,11 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
} else {
this.sslConfiguration = null;
}
}
@Override
protected void onException(HttpChannel channel, Exception e) {
if (!lifecycle.started()) {
return;
}
if (isNotSslRecordException(e)) {
if (logger.isTraceEnabled()) {
logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}",
channel), e);
} else {
logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel);
}
CloseableChannel.closeChannel(channel);
} else if (isCloseDuringHandshakeException(e)) {
if (logger.isTraceEnabled()) {
logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e);
} else {
logger.warn("connection {} closed during ssl handshake", channel);
}
CloseableChannel.closeChannel(channel);
} else if (isReceivedCertificateUnknownException(e)) {
if (logger.isTraceEnabled()) {
logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}",
channel), e);
} else {
logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
}
CloseableChannel.closeChannel(channel);
} else {
super.onException(channel, e);
}
securityExceptionHandler.accept(channel, e);
}
@Override
@ -115,10 +82,4 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
ch.pipeline().addFirst("ip_filter", new IpFilterRemoteAddressFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME));
}
}
public static void overrideSettings(Settings.Builder settingsBuilder, Settings settings) {
if (HTTP_SSL_ENABLED.get(settings) && SETTING_HTTP_COMPRESSION.exists(settings) == false) {
settingsBuilder.put(SETTING_HTTP_COMPRESSION.getKey(), false);
}
}
}

View File

@ -0,0 +1,32 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport.nio;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.nio.NioSocketChannel;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;

import java.util.function.Predicate;

/**
 * Channel predicate that applies the security {@link IPFilter} to accepted NIO
 * socket channels for a given transport profile. A {@code null} filter means IP
 * filtering is disabled, in which case every channel is accepted.
 */
public final class NioIPFilter implements Predicate<NioSocketChannel> {

    private final IPFilter filter;
    private final String profile;

    NioIPFilter(@Nullable IPFilter filter, String profile) {
        this.filter = filter;
        this.profile = profile;
    }

    @Override
    public boolean test(NioSocketChannel nioChannel) {
        // Short-circuit: no filter configured means everything is allowed.
        return filter == null || filter.accept(profile, nioChannel.getRemoteAddress());
    }
}

View File

@ -14,6 +14,7 @@ import org.elasticsearch.nio.SocketChannelContext;
import org.elasticsearch.nio.NioSelector;
import org.elasticsearch.nio.WriteOperation;
import javax.net.ssl.SSLEngine;
import java.io.IOException;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@ -164,6 +165,10 @@ public final class SSLChannelContext extends SocketChannelContext {
}
}
// Exposes the SSLEngine backing this channel's SSLDriver, e.g. so callers can
// inspect the negotiated session or the configured client-auth mode.
public SSLEngine getSSLEngine() {
    return sslDriver.getSSLEngine();
}
private static class CloseNotifyOperation implements WriteOperation {
private static final BiConsumer<Void, Exception> LISTENER = (v, t) -> {};

View File

@ -96,6 +96,10 @@ public class SSLDriver implements AutoCloseable {
}
}
// Returns the SSLEngine this driver wraps; handed out so higher layers can
// query engine state without reaching into the driver's internals.
public SSLEngine getSSLEngine() {
    return engine;
}
public boolean hasFlushPending() {
return networkWriteBuffer.hasRemaining();
}

View File

@ -0,0 +1,132 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport.nio;

import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.PageCacheRecycler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.http.nio.HttpReadWriteHandler;
import org.elasticsearch.http.nio.NioHttpChannel;
import org.elasticsearch.http.nio.NioHttpServerChannel;
import org.elasticsearch.http.nio.NioHttpServerTransport;
import org.elasticsearch.nio.BytesChannelContext;
import org.elasticsearch.nio.ChannelFactory;
import org.elasticsearch.nio.InboundChannelBuffer;
import org.elasticsearch.nio.NioSelector;
import org.elasticsearch.nio.NioSocketChannel;
import org.elasticsearch.nio.ServerChannelContext;
import org.elasticsearch.nio.SocketChannelContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;

import javax.net.ssl.SSLEngine;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.function.Consumer;
import java.util.function.Supplier;

import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;

/**
 * NIO based HTTP server transport with the security features layered on top of
 * {@link NioHttpServerTransport}: optional TLS on accepted client channels,
 * IP based connection filtering, and security aware exception handling.
 */
public class SecurityNioHttpServerTransport extends NioHttpServerTransport {

    private final SecurityHttpExceptionHandler securityExceptionHandler;
    private final IPFilter ipFilter;
    private final NioIPFilter nioIpFilter;
    private final SSLService sslService;
    // null when SSL is disabled for the HTTP transport
    private final SSLConfiguration sslConfiguration;
    private final boolean sslEnabled;

    /**
     * @throws IllegalArgumentException if SSL is enabled but no key is configured
     *                                  for server-side use (fail fast at node startup)
     */
    public SecurityNioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays,
                                          PageCacheRecycler pageCacheRecycler, ThreadPool threadPool,
                                          NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, IPFilter ipFilter,
                                          SSLService sslService) {
        super(settings, networkService, bigArrays, pageCacheRecycler, threadPool, xContentRegistry, dispatcher);
        // Exceptions the security handler does not recognize are delegated back to the parent transport.
        this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e));
        this.ipFilter = ipFilter;
        this.nioIpFilter = new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME);
        this.sslEnabled = HTTP_SSL_ENABLED.get(settings);
        this.sslService = sslService;
        if (sslEnabled) {
            this.sslConfiguration = sslService.getHttpTransportSSLConfiguration();
            // A server-side TLS endpoint must have a key; reject the configuration up front
            // rather than failing on the first connection.
            if (sslService.isConfigurationValidForServerUsage(sslConfiguration) == false) {
                throw new IllegalArgumentException("a key must be provided to run as a server. the key should be configured using the " +
                    "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting");
            }
        } else {
            this.sslConfiguration = null;
        }
    }

    @Override
    protected void doStart() {
        super.doStart();
        // Publish the bound HTTP address so the IP filter can apply its HTTP profile rules.
        ipFilter.setBoundHttpTransportAddress(this.boundAddress());
    }

    // Factory hook; overridden here so accepted channels get the security channel contexts below.
    protected SecurityHttpChannelFactory channelFactory() {
        return new SecurityHttpChannelFactory();
    }

    /**
     * Creates HTTP channels whose contexts run the IP filter predicate and,
     * when SSL is enabled, wrap all traffic in an {@link SSLDriver}.
     */
    class SecurityHttpChannelFactory extends ChannelFactory<NioHttpServerChannel, NioHttpChannel> {

        private SecurityHttpChannelFactory() {
            super(new RawChannelFactory(tcpNoDelay, tcpKeepAlive, reuseAddress, tcpSendBufferSize, tcpReceiveBufferSize));
        }

        @Override
        public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
            NioHttpChannel httpChannel = new NioHttpChannel(channel);
            // Pages for the inbound buffer come from the shared page cache recycler;
            // the close hook returns each page to the cache.
            Supplier<InboundChannelBuffer.Page> pageSupplier = () -> {
                Recycler.V<byte[]> bytes = pageCacheRecycler.bytePage(false);
                return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close);
            };
            HttpReadWriteHandler httpHandler = new HttpReadWriteHandler(httpChannel,SecurityNioHttpServerTransport.this,
                handlingSettings, corsConfig);
            InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
            // Route per-channel exceptions through the security exception handler.
            Consumer<Exception> exceptionHandler = (e) -> securityExceptionHandler.accept(httpChannel, e);
            SocketChannelContext context;
            if (sslEnabled) {
                SSLEngine sslEngine;
                boolean hostnameVerificationEnabled = sslConfiguration.verificationMode().isHostnameVerificationEnabled();
                if (hostnameVerificationEnabled) {
                    InetSocketAddress address = (InetSocketAddress) channel.getRemoteAddress();
                    // we create the socket based on the name given. don't reverse DNS
                    sslEngine = sslService.createSSLEngine(sslConfiguration, address.getHostString(), address.getPort());
                } else {
                    sslEngine = sslService.createSSLEngine(sslConfiguration, null, -1);
                }
                // false -> server mode for the SSL driver
                SSLDriver sslDriver = new SSLDriver(sslEngine, false);
                context = new SSLChannelContext(httpChannel, selector, exceptionHandler, sslDriver, httpHandler, buffer, nioIpFilter);
            } else {
                context = new BytesChannelContext(httpChannel, selector, exceptionHandler, httpHandler, buffer, nioIpFilter);
            }
            httpChannel.setContext(context);
            return httpChannel;
        }

        @Override
        public NioHttpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) {
            NioHttpServerChannel httpServerChannel = new NioHttpServerChannel(channel);
            Consumer<Exception> exceptionHandler = (e) -> onServerException(httpServerChannel, e);
            Consumer<NioSocketChannel> acceptor = SecurityNioHttpServerTransport.this::acceptChannel;
            ServerChannelContext context = new ServerChannelContext(httpServerChannel, this, selector, acceptor, exceptionHandler);
            httpServerChannel.setContext(context);
            return httpServerChannel;
        }
    }
}

View File

@ -44,7 +44,6 @@ import java.nio.channels.SocketChannel;
import java.util.Collections;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static org.elasticsearch.xpack.core.security.SecurityField.setting;
@ -129,19 +128,11 @@ public class SecurityNioTransport extends NioTransport {
return new SecurityTcpChannelFactory(profileSettings, isClient);
}
private boolean validateChannel(NioSocketChannel channel) {
if (authenticator != null) {
NioTcpChannel nioTcpChannel = (NioTcpChannel) channel;
return authenticator.accept(nioTcpChannel.getProfile(), nioTcpChannel.getRemoteAddress());
} else {
return true;
}
}
private class SecurityTcpChannelFactory extends TcpChannelFactory {
private final String profileName;
private final boolean isClient;
private final NioIPFilter ipFilter;
private SecurityTcpChannelFactory(ProfileSettings profileSettings, boolean isClient) {
super(new RawChannelFactory(profileSettings.tcpNoDelay,
@ -151,12 +142,12 @@ public class SecurityNioTransport extends NioTransport {
Math.toIntExact(profileSettings.receiveBufferSize.getBytes())));
this.profileName = profileSettings.profileName;
this.isClient = isClient;
this.ipFilter = new NioIPFilter(authenticator, profileName);
}
@Override
public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel);
SocketChannelContext context;
Supplier<InboundChannelBuffer.Page> pageSupplier = () -> {
Recycler.V<byte[]> bytes = pageCacheRecycler.bytePage(false);
return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close);
@ -164,8 +155,8 @@ public class SecurityNioTransport extends NioTransport {
TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, SecurityNioTransport.this);
InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
Consumer<Exception> exceptionHandler = (e) -> onException(nioChannel, e);
Predicate<NioSocketChannel> filter = SecurityNioTransport.this::validateChannel;
SocketChannelContext context;
if (sslEnabled) {
SSLEngine sslEngine;
SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE);
@ -179,9 +170,9 @@ public class SecurityNioTransport extends NioTransport {
sslEngine = sslService.createSSLEngine(sslConfig, null, -1);
}
SSLDriver sslDriver = new SSLDriver(sslEngine, isClient);
context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, filter);
context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, ipFilter);
} else {
context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, filter);
context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, ipFilter);
}
nioChannel.setContext(context);

View File

@ -244,6 +244,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase {
builder.put(customSettings, false); // handle secure settings separately
builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO);
builder.put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO);
Settings.Builder customBuilder = Settings.builder().put(customSettings);
if (customBuilder.getSecureSettings() != null) {
SecuritySettingsSource.addSecureSettings(builder, secureSettings ->

View File

@ -126,6 +126,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal))
.put(XPackSettings.SECURITY_ENABLED.getKey(), true)
.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
.put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
//TODO: for now isolate security tests from watcher & monitoring (randomize this later)
.put(XPackSettings.WATCHER_ENABLED.getKey(), false)
.put(XPackSettings.MONITORING_ENABLED.getKey(), false)

View File

@ -49,18 +49,16 @@ public class TransportChangePasswordActionTests extends ESTestCase {
public void testAnonymousUser() {
final String hashingAlgorithm = randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt", "bcrypt9");
Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build();
Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser")
.put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
AnonymousUser anonymousUser = new AnonymousUser(settings);
NativeUsersStore usersStore = mock(NativeUsersStore.class);
Settings passwordHashingSettings = Settings.builder().
put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
TransportService transportService = new TransportService(passwordHashingSettings, mock(Transport.class), null,
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportChangePasswordAction action = new TransportChangePasswordAction(settings, transportService,
mock(ActionFilters.class), usersStore);
ChangePasswordRequest request = new ChangePasswordRequest();
// Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
ChangePasswordRequest request = new ChangePasswordRequest();
request.username(anonymousUser.principal());
request.passwordHash(Hasher.resolve(hashingAlgorithm).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
@ -89,14 +87,13 @@ public class TransportChangePasswordActionTests extends ESTestCase {
NativeUsersStore usersStore = mock(NativeUsersStore.class);
Settings passwordHashingSettings = Settings.builder().
put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
TransportService transportService = new TransportService(passwordHashingSettings, mock(Transport.class), null,
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService,
TransportChangePasswordAction action = new TransportChangePasswordAction(passwordHashingSettings, transportService,
mock(ActionFilters.class), usersStore);
// Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
ChangePasswordRequest request = new ChangePasswordRequest();
request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal()));
// Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
request.passwordHash(Hasher.resolve(hashingAlgorithm).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();

View File

@ -62,7 +62,7 @@ public class LdapTestUtils {
final SSLConfiguration sslConfiguration;
if (useGlobalSSL) {
sslConfiguration = sslService.getSSLConfiguration("_global");
sslConfiguration = sslService.getSSLConfiguration("xpack.ssl");
} else {
sslConfiguration = sslService.getSSLConfiguration("xpack.security.authc.realms.foo.ssl");
}

View File

@ -0,0 +1,44 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.XPackSettings;

import static org.hamcrest.Matchers.is;

/**
 * Tests for {@link SecurityHttpSettings#overrideSettings}: HTTP compression is
 * disabled by default when SSL is enabled, and an explicit user choice is
 * never overridden.
 */
public class SecurityHttpSettingsTests extends ESTestCase {

    public void testDisablesCompressionByDefaultForSsl() {
        // SSL on, compression unset -> the override must force compression off.
        Settings nodeSettings = Settings.builder()
            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
            .build();
        Settings.Builder overrides = Settings.builder();
        SecurityHttpSettings.overrideSettings(overrides, nodeSettings);
        assertThat(HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(overrides.build()), is(false));
    }

    public void testLeavesCompressionOnIfNotSsl() {
        // SSL off -> no override is applied at all.
        Settings nodeSettings = Settings.builder()
            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), false)
            .build();
        Settings.Builder overrides = Settings.builder();
        SecurityHttpSettings.overrideSettings(overrides, nodeSettings);
        assertThat(overrides.build().isEmpty(), is(true));
    }

    public void testDoesNotChangeExplicitlySetCompression() {
        // SSL on but the user explicitly enabled compression -> leave it alone.
        Settings nodeSettings = Settings.builder()
            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
            .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
            .build();
        Settings.Builder overrides = Settings.builder();
        SecurityHttpSettings.overrideSettings(overrides, nodeSettings);
        assertThat(overrides.build().isEmpty(), is(true));
    }
}

View File

@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.http.NullDispatcher;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
@ -144,34 +143,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase {
assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
}
public void testDisablesCompressionByDefaultForSsl() throws Exception {
Settings settings = Settings.builder()
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
Settings.Builder pluginSettingsBuilder = Settings.builder();
SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
assertThat(HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(pluginSettingsBuilder.build()), is(false));
}
public void testLeavesCompressionOnIfNotSsl() throws Exception {
Settings settings = Settings.builder()
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), false).build();
Settings.Builder pluginSettingsBuilder = Settings.builder();
SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
}
public void testDoesNotChangeExplicitlySetCompression() throws Exception {
Settings settings = Settings.builder()
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
.build();
Settings.Builder pluginSettingsBuilder = Settings.builder();
SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
}
public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");

View File

@ -0,0 +1,91 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.transport.nio;

import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.nio.NioSocketChannel;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.security.audit.AuditTrailService;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.junit.Before;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;

import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests that {@link NioIPFilter} delegates to a real {@link IPFilter}:
 * connections from an allowed address pass the predicate and connections
 * from a denied address fail it.
 */
public class NioIPFilterTests extends ESTestCase {

    private NioIPFilter nioIPFilter;

    @Before
    public void init() throws Exception {
        // Allow loopback, deny the whole 10.0.0.0/8 range.
        Settings settings = Settings.builder()
            .put("xpack.security.transport.filter.allow", "127.0.0.1")
            .put("xpack.security.transport.filter.deny", "10.0.0.0/8")
            .build();
        // Randomly exercise either the HTTP profile or the default transport profile.
        boolean isHttpEnabled = randomBoolean();

        // Mock a started transport bound to loopback so the IPFilter has an address to work with.
        Transport transport = mock(Transport.class);
        TransportAddress address = new TransportAddress(InetAddress.getLoopbackAddress(), 9300);
        when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { address }, address));
        when(transport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
        // Register every IP filter related setting so ClusterSettings accepts them.
        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(
            IPFilter.HTTP_FILTER_ALLOW_SETTING,
            IPFilter.HTTP_FILTER_DENY_SETTING,
            IPFilter.IP_FILTER_ENABLED_HTTP_SETTING,
            IPFilter.IP_FILTER_ENABLED_SETTING,
            IPFilter.TRANSPORT_FILTER_ALLOW_SETTING,
            IPFilter.TRANSPORT_FILTER_DENY_SETTING,
            IPFilter.PROFILE_FILTER_ALLOW_SETTING,
            IPFilter.PROFILE_FILTER_DENY_SETTING)));
        // The license must permit IP filtering or the filter becomes a no-op.
        XPackLicenseState licenseState = mock(XPackLicenseState.class);
        when(licenseState.isIpFilteringAllowed()).thenReturn(true);
        when(licenseState.isSecurityEnabled()).thenReturn(true);
        AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState);
        IPFilter ipFilter = new IPFilter(settings, auditTrailService, clusterSettings, licenseState);
        ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses());
        if (isHttpEnabled) {
            // Also bind a mock HTTP transport so the HTTP profile has an address.
            HttpServerTransport httpTransport = mock(HttpServerTransport.class);
            TransportAddress httpAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 9200);
            when(httpTransport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { httpAddress }, httpAddress));
            when(httpTransport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
            ipFilter.setBoundHttpTransportAddress(httpTransport.boundAddress());
        }

        if (isHttpEnabled) {
            nioIPFilter = new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME);
        } else {
            nioIPFilter = new NioIPFilter(ipFilter, "default");
        }
    }

    public void testThatFilteringWorksByIp() throws Exception {
        // 127.0.0.1 is on the allow list -> accepted.
        InetSocketAddress localhostAddr = new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 12345);
        NioSocketChannel channel1 = mock(NioSocketChannel.class);
        when(channel1.getRemoteAddress()).thenReturn(localhostAddr);
        assertThat(nioIPFilter.test(channel1), is(true));

        // 10.0.0.8 falls inside the denied 10.0.0.0/8 range -> rejected.
        InetSocketAddress remoteAddr = new InetSocketAddress(InetAddresses.forString("10.0.0.8"), 12345);
        NioSocketChannel channel2 = mock(NioSocketChannel.class);
        when(channel2.getRemoteAddress()).thenReturn(remoteAddr);
        assertThat(nioIPFilter.test(channel2), is(false));
    }
}

View File

@ -0,0 +1,207 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.transport.nio;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.PageCacheRecycler;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.http.NullDispatcher;
import org.elasticsearch.http.nio.NioHttpChannel;
import org.elasticsearch.nio.NioSelector;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.ssl.SSLClientAuth;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.security.transport.SSLEngineUtils;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.junit.Before;
import javax.net.ssl.SSLEngine;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.channels.SocketChannel;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Locale;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SecurityNioHttpServerTransportTests extends ESTestCase {
private SSLService sslService;
private Environment env;
private InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0);
@Before
public void createSSLService() {
Path testNodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks");
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode");
Settings settings = Settings.builder()
.put("xpack.ssl.keystore.path", testNodeStore)
.put("path.home", createTempDir())
.setSecureSettings(secureSettings)
.build();
env = TestEnvironment.newEnvironment(settings);
sslService = new SSLService(settings, env);
}
public void testDefaultClientAuth() throws IOException {
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
sslService = new SSLService(settings, env);
SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
SocketChannel socketChannel = mock(SocketChannel.class);
when(socketChannel.getRemoteAddress()).thenReturn(address);
NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
assertThat(engine.getNeedClientAuth(), is(false));
assertThat(engine.getWantClientAuth(), is(false));
}
public void testOptionalClientAuth() throws IOException {
String value = randomFrom(SSLClientAuth.OPTIONAL.name(), SSLClientAuth.OPTIONAL.name().toLowerCase(Locale.ROOT));
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.client_authentication", value).build();
sslService = new SSLService(settings, env);
SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
SocketChannel socketChannel = mock(SocketChannel.class);
when(socketChannel.getRemoteAddress()).thenReturn(address);
NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
assertThat(engine.getNeedClientAuth(), is(false));
assertThat(engine.getWantClientAuth(), is(true));
}
public void testRequiredClientAuth() throws IOException {
String value = randomFrom(SSLClientAuth.REQUIRED.name(), SSLClientAuth.REQUIRED.name().toLowerCase(Locale.ROOT));
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.client_authentication", value).build();
sslService = new SSLService(settings, env);
SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
SocketChannel socketChannel = mock(SocketChannel.class);
when(socketChannel.getRemoteAddress()).thenReturn(address);
NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
assertThat(engine.getNeedClientAuth(), is(true));
assertThat(engine.getWantClientAuth(), is(false));
}
public void testNoClientAuth() throws IOException {
String value = randomFrom(SSLClientAuth.NONE.name(), SSLClientAuth.NONE.name().toLowerCase(Locale.ROOT));
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.client_authentication", value).build();
sslService = new SSLService(settings, env);
SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
SocketChannel socketChannel = mock(SocketChannel.class);
when(socketChannel.getRemoteAddress()).thenReturn(address);
NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
assertThat(engine.getNeedClientAuth(), is(false));
assertThat(engine.getWantClientAuth(), is(false));
}
public void testCustomSSLConfiguration() throws IOException {
Settings settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
sslService = new SSLService(settings, env);
SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
SocketChannel socketChannel = mock(SocketChannel.class);
when(socketChannel.getRemoteAddress()).thenReturn(address);
NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
SSLEngine defaultEngine = SSLEngineUtils.getSSLEngine(channel);
settings = Settings.builder()
.put(env.settings())
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("xpack.security.http.ssl.supported_protocols", "TLSv1.2")
.build();
sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings));
transport = new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
factory = transport.channelFactory();
channel = factory.createChannel(mock(NioSelector.class), socketChannel);
SSLEngine customEngine = SSLEngineUtils.getSSLEngine(channel);
assertThat(customEngine.getEnabledProtocols(), arrayContaining("TLSv1.2"));
assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
}
public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
Settings settings = Settings.builder()
.put("xpack.ssl.truststore.path",
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"))
.setSecureSettings(secureSettings)
.put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
.put("path.home", createTempDir())
.build();
env = TestEnvironment.newEnvironment(settings);
sslService = new SSLService(settings, env);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new SecurityNioHttpServerTransport(settings,
new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService));
assertThat(e.getMessage(), containsString("key must be provided"));
}
/**
 * When SSL is NOT enabled on the HTTP layer, a transport may be built from a
 * truststore-only configuration (no key material) without throwing.
 * The constructor call itself is the assertion, so the result is deliberately
 * not assigned to a local variable (the original kept an unused local).
 */
public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() {
    MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
    Settings settings = Settings.builder()
        .put("xpack.ssl.truststore.path",
            getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"))
        .setSecureSettings(secureSettings)
        .put("path.home", createTempDir())
        .build();
    env = TestEnvironment.newEnvironment(settings);
    sslService = new SSLService(settings, env);
    // must not throw "key must be provided"
    new SecurityNioHttpServerTransport(settings,
        new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class),
        mock(ThreadPool.class), xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
}
}

View File

@ -31,6 +31,9 @@ import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.core.watcher.actions.Action;
import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper;
@ -85,6 +88,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
@ -1072,6 +1076,33 @@ public class ExecutionServiceTests extends ESTestCase {
assertThat(watchRecord.state(), is(ExecutionState.EXECUTED));
}
public void testLoadingWatchExecutionUser() throws Exception {
    DateTime triggerTime = now(UTC);
    Watch watch = mock(Watch.class);
    WatchStatus status = mock(WatchStatus.class);
    ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", triggerTime, triggerTime);

    // No status on the watch yet -> no user can be resolved
    TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), triggerTime, event, timeValueSeconds(5));
    context.ensureWatchExists(() -> watch);
    assertNull(context.getUser());

    // A status without the authentication header still resolves to no user
    when(watch.status()).thenReturn(status);
    context = new TriggeredExecutionContext(watch.id(), triggerTime, event, timeValueSeconds(5));
    context.ensureWatchExists(() -> watch);
    assertNull(context.getUser());

    // Once the authentication header is present on the status, the stored username is exposed
    Authentication authentication = new Authentication(new User("joe", "admin"),
        new Authentication.RealmRef("native_realm", "native", "node1"), null);
    when(status.getHeaders())
        .thenReturn(Collections.singletonMap(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
    context = new TriggeredExecutionContext(watch.id(), triggerTime, event, timeValueSeconds(5));
    context.ensureWatchExists(() -> watch);
    assertThat(context.getUser(), equalTo("joe"));
}
private WatchExecutionContext createMockWatchExecutionContext(String watchId, DateTime executionTime) {
WatchExecutionContext ctx = mock(WatchExecutionContext.class);
when(ctx.id()).thenReturn(new Wid(watchId, executionTime));

View File

@ -438,6 +438,7 @@ public class MlJobIT extends ESRestTestCase {
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034")
public void testDeleteJobAfterMissingAliases() throws Exception {
String jobId = "delete-job-after-missing-alias-job";
String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);

View File

@ -0,0 +1,326 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.multi_node;
import org.apache.http.HttpStatus;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.core.rollup.job.RollupJob;
import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath;
import org.junit.After;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.isOneOf;
public class RollupIT extends ESRestTestCase {
@Override
protected Settings restClientSettings() {
return getClientSettings("super-user", "x-pack-super-password");
}
@Override
protected Settings restAdminSettings() {
return getClientSettings("super-user", "x-pack-super-password");
}
private Settings getClientSettings(final String username, final String password) {
final String token = basicAuthHeaderValue(username, new SecureString(password.toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
static Map<String, Object> toMap(Response response) throws IOException {
return toMap(EntityUtils.toString(response.getEntity()));
}
static Map<String, Object> toMap(String response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
}
@After
public void clearRollupMetadata() throws Exception {
deleteAllJobs();
waitForPendingTasks();
// indices will be deleted by the ESRestTestCase class
}
public void testBigRollup() throws Exception {
final int numDocs = 200;
// index documents for the rollup job
final StringBuilder bulk = new StringBuilder();
for (int i = 0; i < numDocs; i++) {
bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60*i)), ZoneId.of("UTC"));
String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
}
bulk.append("\r\n");
final Request bulkRequest = new Request("POST", "/_bulk");
bulkRequest.addParameter("refresh", "true");
bulkRequest.setJsonEntity(bulk.toString());
client().performRequest(bulkRequest);
// create the rollup job
final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test");
createRollupJobRequest.setJsonEntity("{"
+ "\"index_pattern\":\"rollup-*\","
+ "\"rollup_index\":\"results-rollup\","
+ "\"cron\":\"*/1 * * * * ?\"," // fast cron and big page size so test runs quickly
+ "\"page_size\":20,"
+ "\"groups\":{"
+ " \"date_histogram\":{"
+ " \"field\":\"timestamp\","
+ " \"interval\":\"5m\""
+ " }"
+ "},"
+ "\"metrics\":["
+ " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}"
+ "]"
+ "}");
Map<String, Object> createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest));
assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));
// start the rollup job
final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start");
Map<String, Object> startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest));
assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));
assertRollUpJob("rollup-job-test");
// Wait for the job to finish, by watching how many rollup docs we've indexed
assertBusy(() -> {
final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test");
Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
Map<String, Object> job = getJob(getRollupJobResponse, "rollup-job-test");
if (job != null) {
assertThat(ObjectPath.eval("status.job_state", job), equalTo("started"));
assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41));
}
}, 30L, TimeUnit.SECONDS);
// Refresh the rollup index to make sure all newly indexed docs are searchable
final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh");
toMap(client().performRequest(refreshRollupIndex));
String jsonRequestBody = "{\n" +
" \"size\": 0,\n" +
" \"query\": {\n" +
" \"match_all\": {}\n" +
" },\n" +
" \"aggs\": {\n" +
" \"date_histo\": {\n" +
" \"date_histogram\": {\n" +
" \"field\": \"timestamp\",\n" +
" \"interval\": \"1h\"\n" +
" },\n" +
" \"aggs\": {\n" +
" \"the_max\": {\n" +
" \"max\": {\n" +
" \"field\": \"value\"\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
"}";
Request request = new Request("GET", "rollup-docs/_search");
request.setJsonEntity(jsonRequestBody);
Response liveResponse = client().performRequest(request);
Map<String, Object> liveBody = toMap(liveResponse);
request = new Request("GET", "results-rollup/_rollup_search");
request.setJsonEntity(jsonRequestBody);
Response rollupResponse = client().performRequest(request);
Map<String, Object> rollupBody = toMap(rollupResponse);
// Do the live agg results match the rollup agg results?
assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody)));
request = new Request("GET", "rollup-docs/_rollup_search");
request.setJsonEntity(jsonRequestBody);
Response liveRollupResponse = client().performRequest(request);
Map<String, Object> liveRollupBody = toMap(liveRollupResponse);
// Does searching the live index via rollup_search work match the live search?
assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody)));
}
@SuppressWarnings("unchecked")
private void assertRollUpJob(final String rollupJob) throws Exception {
String[] states = new String[]{"indexing", "started"};
waitForRollUpJob(rollupJob, states);
// check that the rollup job is started using the RollUp API
final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob);
Map<String, Object> getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest));
Map<String, Object> job = getJob(getRollupJobResponse, rollupJob);
if (job != null) {
assertThat(ObjectPath.eval("status.job_state", job), isOneOf(states));
}
// check that the rollup job is started using the Tasks API
final Request taskRequest = new Request("GET", "_tasks");
taskRequest.addParameter("detailed", "true");
taskRequest.addParameter("actions", "xpack/rollup/*");
Map<String, Object> taskResponse = toMap(client().performRequest(taskRequest));
Map<String, Object> taskResponseNodes = (Map<String, Object>) taskResponse.get("nodes");
Map<String, Object> taskResponseNode = (Map<String, Object>) taskResponseNodes.values().iterator().next();
Map<String, Object> taskResponseTasks = (Map<String, Object>) taskResponseNode.get("tasks");
Map<String, Object> taskResponseStatus = (Map<String, Object>) taskResponseTasks.values().iterator().next();
assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), isOneOf(states));
// check that the rollup job is started using the Cluster State API
final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata");
Map<String, Object> clusterStateResponse = toMap(client().performRequest(clusterStateRequest));
List<Map<String, Object>> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse);
boolean hasRollupTask = false;
for (Map<String, Object> task : rollupJobTasks) {
if (ObjectPath.eval("id", task).equals(rollupJob)) {
hasRollupTask = true;
final String jobStateField = "task.xpack/rollup/job.state.job_state";
assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"),
ObjectPath.eval(jobStateField, task), isOneOf(states));
break;
}
}
if (hasRollupTask == false) {
fail("Expected persistent task for [" + rollupJob + "] but none found.");
}
}
private void waitForRollUpJob(final String rollupJob,String[] expectedStates) throws Exception {
assertBusy(() -> {
final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob);
Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
Map<String, Object> job = getJob(getRollupJobResponse, rollupJob);
if (job != null) {
assertThat(ObjectPath.eval("status.job_state", job), isOneOf(expectedStates));
}
}, 30L, TimeUnit.SECONDS);
}
private Map<String, Object> getJob(Response response, String targetJobId) throws IOException {
return getJob(ESRestTestCase.entityAsMap(response), targetJobId);
}
@SuppressWarnings("unchecked")
private Map<String, Object> getJob(Map<String, Object> jobsMap, String targetJobId) throws IOException {
List<Map<String, Object>> jobs =
(List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobsMap);
if (jobs == null) {
return null;
}
for (Map<String, Object> job : jobs) {
String jobId = (String) ((Map<String, Object>) job.get("config")).get("id");
if (jobId.equals(targetJobId)) {
return job;
}
}
return null;
}
private void waitForPendingTasks() throws Exception {
ESTestCase.assertBusy(() -> {
try {
Request request = new Request("GET", "/_cat/tasks");
request.addParameter("detailed", "true");
Response response = adminClient().performRequest(request);
if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
try (BufferedReader responseReader = new BufferedReader(
new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
int activeTasks = 0;
String line;
StringBuilder tasksListString = new StringBuilder();
while ((line = responseReader.readLine()) != null) {
// We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks
if (line.startsWith(RollupJob.NAME) == true) {
activeTasks++;
tasksListString.append(line);
tasksListString.append('\n');
}
}
assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks);
}
}
} catch (IOException e) {
throw new AssertionError("Error getting active tasks list", e);
}
});
}
@SuppressWarnings("unchecked")
private void deleteAllJobs() throws Exception {
Request request = new Request("GET", "/_xpack/rollup/job/_all");
Response response = adminClient().performRequest(request);
Map<String, Object> jobs = ESRestTestCase.entityAsMap(response);
@SuppressWarnings("unchecked")
List<Map<String, Object>> jobConfigs =
(List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs);
if (jobConfigs == null) {
return;
}
for (Map<String, Object> jobConfig : jobConfigs) {
logger.debug(jobConfig);
String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
logger.debug("Deleting job " + jobId);
try {
request = new Request("DELETE", "/_xpack/rollup/job/" + jobId);
adminClient().performRequest(request);
} catch (Exception e) {
// ok
}
}
}
private static String responseEntityToString(Response response) throws Exception {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
return reader.lines().collect(Collectors.joining("\n"));
}
}
}

View File

@ -104,7 +104,13 @@ public class OpenLdapTests extends ESTestCase {
builder.put("xpack.security.authc.realms." + REALM_NAME + ".ssl.truststore.path", truststore);
mockSecureSettings.setString("xpack.security.authc.realms." + REALM_NAME + ".ssl.truststore.secure_password", "changeit");
builder.put("xpack.security.authc.realms." + REALM_NAME + ".ssl.verification_mode", VerificationMode.CERTIFICATE);
// If not using global ssl, need to set the truststore for the "full verification" realm
builder.put("xpack.security.authc.realms.vmode_full.ssl.truststore.path", truststore);
mockSecureSettings.setString("xpack.security.authc.realms.vmode_full.ssl.truststore.secure_password", "changeit");
}
builder.put("xpack.security.authc.realms.vmode_full.ssl.verification_mode", VerificationMode.FULL);
globalSettings = builder.setSecureSettings(mockSecureSettings).build();
Environment environment = TestEnvironment.newEnvironment(globalSettings);
sslService = new SSLService(globalSettings, environment);
@ -188,10 +194,10 @@ public class OpenLdapTests extends ESTestCase {
Settings settings = Settings.builder()
// The certificate used in the vagrant box is valid for "localhost", but not for "127.0.0.1"
.put(buildLdapSettings(OPEN_LDAP_IP_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
.put("ssl.verification_mode", VerificationMode.FULL)
.build();
RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
// Pick up the "full" verification mode config
RealmConfig config = new RealmConfig("vmode_full", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY));
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
@ -211,10 +217,10 @@ public class OpenLdapTests extends ESTestCase {
Settings settings = Settings.builder()
// The certificate used in the vagrant box is valid for "localhost" (but not for "127.0.0.1")
.put(buildLdapSettings(OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
.put("ssl.verification_mode", VerificationMode.FULL)
.build();
RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
// Pick up the "full" verification mode config
RealmConfig config = new RealmConfig("vmode_full", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY));
LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);

View File

@ -21,6 +21,7 @@ watcher_manager:
run_as:
- powerless_user
- watcher_manager
- x_pack_rest_user
watcher_monitor:
cluster:

View File

@ -74,10 +74,63 @@ teardown:
id: "my_watch"
- match: { watch_record.watch_id: "my_watch" }
- match: { watch_record.state: "executed" }
- match: { watch_record.user: "watcher_manager" }
---
"Test watch is runas user properly recorded":
- do:
xpack.watcher.put_watch:
id: "my_watch"
body: >
{
"trigger": {
"schedule" : { "cron" : "0 0 0 1 * ? 2099" }
},
"input": {
"search" : {
"request" : {
"indices" : [ "my_test_index" ],
"body" :{
"query" : { "match_all": {} }
}
}
}
},
"condition" : {
"compare" : {
"ctx.payload.hits.total" : {
"gte" : 1
}
}
},
"actions": {
"logging": {
"logging": {
"text": "Successfully ran my_watch to test for search input"
}
}
}
}
- match: { _id: "my_watch" }
- do:
xpack.watcher.get_watch:
id: "my_watch"
- match: { _id: "my_watch" }
- is_false: watch.status.headers
- do:
headers: { es-security-runas-user: x_pack_rest_user }
xpack.watcher.execute_watch:
id: "my_watch"
- match: { watch_record.watch_id: "my_watch" }
- match: { watch_record.state: "executed" }
- match: { watch_record.user: "x_pack_rest_user" }
---
"Test watch search input does not work against index user is not allowed to read":
@ -130,6 +183,7 @@ teardown:
- match: { watch_record.watch_id: "my_watch" }
# because we are not allowed to read the index, there won't be any data
- match: { watch_record.state: "execution_not_needed" }
- match: { watch_record.user: "watcher_manager" }
---
@ -272,6 +326,7 @@ teardown:
id: "my_watch"
- match: { watch_record.watch_id: "my_watch" }
- match: { watch_record.state: "executed" }
- match: { watch_record.user: "watcher_manager" }
- do:
get:
@ -320,6 +375,7 @@ teardown:
id: "my_watch"
- match: { watch_record.watch_id: "my_watch" }
- match: { watch_record.state: "executed" }
- match: { watch_record.user: "watcher_manager" }
- do:
get: