Merge branch 'master' into index-lifecycle

Tal Levy 2018-08-01 10:15:18 -07:00
commit ea1e26ca27
171 changed files with 5720 additions and 806 deletions

View File

@ -100,6 +100,12 @@ JDK 10 and testing on a JDK 8 runtime; to do this, set `RUNTIME_JAVA_HOME`
pointing to the Java home of a JDK 8 installation. Note that this mechanism can
be used to test against other JDKs as well; it is not limited to JDK 8.
> Note: It is also required to have `JAVA7_HOME`, `JAVA8_HOME` and
`JAVA10_HOME` available so that the tests can pass.
> Warning: do not use `sdkman` for Java installations whose JDK distribution
does not ship a proper `jrunscript`.
Elasticsearch uses the Gradle wrapper for its build. You can execute Gradle
using the wrapper via the `gradlew` script in the root of the repository.
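As a minimal sketch (the JDK paths and the `check` task below are only
examples), building with JDK 10 while testing on a JDK 8 runtime could look
like:
```
export JAVA_HOME=/path/to/jdk-10        # JDK used to build
export RUNTIME_JAVA_HOME=/path/to/jdk-8 # JDK used to run the tests
./gradlew check
```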

View File

@ -11,22 +11,25 @@ The microbenchmark suite is also handy for ad-hoc microbenchmarks but please rem
## Getting Started
Just run `gradle :benchmarks:jmh` from the project root directory. It will build all microbenchmarks, execute them and print the result.
Just run `gradlew -p benchmarks run` from the project root
directory. It will build all microbenchmarks, execute them and print
the result.
## Running Microbenchmarks
Benchmarks are always run via Gradle with `gradle :benchmarks:jmh`.
Running via an IDE is not supported as the results are meaningless
because we have no control over the JVM running the benchmarks.
Running via an IDE is not supported as the results are meaningless (we have no control over the JVM running the benchmarks).
If you want to run a specific benchmark class, e.g. `org.elasticsearch.benchmark.MySampleBenchmark` or have special requirements
generate the uberjar with `gradle :benchmarks:jmhJar` and run it directly with:
If you want to run a specific benchmark class like, say,
`MemoryStatsBenchmark`, you can use `--args`:
```
java -jar benchmarks/build/distributions/elasticsearch-benchmarks-*.jar
gradlew -p benchmarks run --args ' MemoryStatsBenchmark'
```
JMH supports lots of command line parameters. Add `-h` to the command above to see the available command line options.
Everything inside the single quotes is passed on the command line to JMH. The
leading space inside the quotes is important; without it, parameters are
sometimes passed to Gradle instead.
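For example, following the note above about the leading space, you can list
JMH's available options with:
```
gradlew -p benchmarks run --args ' -h'
```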
## Adding Microbenchmarks

View File

@ -18,11 +18,8 @@
*/
apply plugin: 'elasticsearch.build'
// order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336
apply plugin: 'application' // have the shadow plugin provide the runShadow task
apply plugin: 'application'
mainClassName = 'org.openjdk.jmh.Main'
apply plugin: 'com.github.johnrengelman.shadow' // build an uberjar with all benchmarks
// Not published so no need to assemble
tasks.remove(assemble)
@ -50,10 +47,8 @@ compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-u
// needs to be added separately otherwise Gradle will quote it and javac will fail
compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])
forbiddenApis {
// classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
ignoreFailures = true
}
// classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
forbiddenApisMain.enabled = false
// No licenses for our benchmark deps (we don't ship benchmarks)
dependencyLicenses.enabled = false
@ -69,20 +64,3 @@ thirdPartyAudit.excludes = [
'org.openjdk.jmh.profile.HotspotRuntimeProfiler',
'org.openjdk.jmh.util.Utils'
]
runShadow {
executable = new File(project.runtimeJavaHome, 'bin/java')
}
// alias the shadowJar and runShadow tasks to abstract from the concrete plugin that we are using and provide a more consistent interface
task jmhJar(
dependsOn: shadowJar,
description: 'Generates an uberjar with the microbenchmarks and all dependencies',
group: 'Benchmark'
)
task jmh(
dependsOn: runShadow,
description: 'Runs all microbenchmarks',
group: 'Benchmark'
)

View File

@ -777,11 +777,16 @@ class BuildPlugin implements Plugin<Project> {
systemProperty property.getKey(), property.getValue()
}
}
// TODO: remove this once joda time is removed from scripting in 7.0
systemProperty 'es.scripting.use_java_time', 'true'
// Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
if (project.inFipsJvm) {
systemProperty 'javax.net.ssl.trustStorePassword', 'password'
systemProperty 'javax.net.ssl.keyStorePassword', 'password'
}
boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))
enableSystemAssertions assertionsEnabled
enableAssertions assertionsEnabled

View File

@ -2,10 +2,18 @@
1. Build `client-benchmark-noop-api-plugin` with `gradle :client:client-benchmark-noop-api-plugin:assemble`
2. Install it on the target host with `bin/elasticsearch-plugin install file:///full/path/to/client-benchmark-noop-api-plugin.zip`
3. Start Elasticsearch on the target host (ideally *not* on the same machine)
4. Build an uberjar with `gradle :client:benchmark:shadowJar` and execute it.
3. Start Elasticsearch on the target host (ideally *not* on the machine
that runs the benchmarks)
4. Run the benchmark with
```
./gradlew -p client/benchmark run --args ' params go here'
```
Repeat all steps above for the other benchmark candidate.
Everything inside the single quotes is passed on the command line to JMH. The
leading space inside the quotes is important; without it, parameters are
sometimes passed to Gradle instead.
See below for some example invocations.
### Example benchmark
@ -13,15 +21,18 @@ In general, you should define a few GC-related settings `-Xms8192M -Xmx8192M -XX
#### Bulk indexing
Download benchmark data from http://benchmarks.elastic.co/corpora/geonames/documents.json.bz2 and decompress them.
Download benchmark data from http://benchmarks.elasticsearch.org.s3.amazonaws.com/corpora/geonames and decompress them.
Example command line parameters:
Example invocation:
```
rest bulk 192.168.2.2 ./documents.json geonames type 8647880 5000
wget http://benchmarks.elasticsearch.org.s3.amazonaws.com/corpora/geonames/documents-2.json.bz2
bzip2 -d documents-2.json.bz2
mv documents-2.json client/benchmark/build
gradlew -p client/benchmark run --args ' rest bulk localhost build/documents-2.json geonames type 8647880 5000'
```
The parameters are in order:
The parameters all go inside the single quotes and are, in order:
* Client type: Use either "rest" or "transport"
* Benchmark type: Use either "bulk" or "search"
@ -33,12 +44,12 @@ The parameters are in order:
* bulk size
#### Bulk indexing
#### Search
Example command line parameters:
Example invocation:
```
rest search 192.168.2.2 geonames "{ \"query\": { \"match_phrase\": { \"name\": \"Sankt Georgen\" } } }\"" 500,1000,1100,1200
gradlew -p client/benchmark run --args ' rest search localhost geonames {"query":{"match_phrase":{"name":"Sankt Georgen"}}} 500,1000,1100,1200'
```
The parameters are in order:
@ -49,5 +60,3 @@ The parameters are in order:
* name of the index
* a search request body (remember to escape double quotes). The `TransportClientBenchmark` uses `QueryBuilders.wrapperQuery()` internally which automatically adds a root key `query`, so it must not be present in the command line parameter.
* A comma-separated list of target throughput rates

View File

@ -18,9 +18,6 @@
*/
apply plugin: 'elasticsearch.build'
// build an uberjar with all benchmarks
apply plugin: 'com.github.johnrengelman.shadow'
// have the shadow plugin provide the runShadow task
apply plugin: 'application'
group = 'org.elasticsearch.client'
@ -32,7 +29,6 @@ build.dependsOn.remove('assemble')
archivesBaseName = 'client-benchmarks'
mainClassName = 'org.elasticsearch.client.benchmark.BenchmarkMain'
// never try to invoke tests on the benchmark project - there aren't any
test.enabled = false

View File

@ -37,6 +37,9 @@ integTestCluster {
extraConfigFile 'hunspell/en_US/en_US.dic', '../server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic'
// Whitelist reindexing from the local node so we can test it.
setting 'reindex.remote.whitelist', '127.0.0.1:*'
// TODO: remove this for 7.0; it exists to allow the doc examples in 6.x to continue using the defaults
systemProperty 'es.scripting.use_java_time', 'false'
}
// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed

View File

@ -198,7 +198,7 @@ POST hockey/player/1/_update
==== Dates
Date fields are exposed as
`ReadableDateTime`
`ReadableDateTime` or
so they support methods like
`getYear`,
and `getDayOfWeek`.
@ -220,6 +220,11 @@ GET hockey/_search
}
----------------------------------------------------------------
// CONSOLE
// TEST[warning:The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true to use the java time api for date field doc values]
NOTE: Date fields are changing in 7.0 to be exposed as `ZonedDateTime`
from Java 8's time API. To switch to this functionality early,
add `-Des.scripting.use_java_time=true` to `jvm.options`.
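As a sketch, assuming the default `config/jvm.options` location of an archive
distribution, the flag can be appended like this:
[source,sh]
----
echo "-Des.scripting.use_java_time=true" >> config/jvm.options
----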
[float]
[[modules-scripting-painless-regex]]

View File

@ -37,7 +37,6 @@ bin/elasticsearch-keystore add discovery.ec2.secret_key
The following are the available discovery settings. All should be prefixed with `discovery.ec2.`.
Those that must be stored in the keystore are marked as `Secure`.
`access_key`::
An ec2 access key. The `secret_key` setting must also be specified. (Secure)
@ -122,6 +121,10 @@ Defaults to `private_ip`.
How long the list of hosts is cached to prevent further requests to the AWS API.
Defaults to `10s`.
*All* secure settings of this plugin are {ref}/secure-settings.html#reloadable-secure-settings[reloadable].
After you reload the settings, an AWS SDK client built with the latest settings
from the keystore will be used.
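As a sketch, assuming a node listening on `localhost:9200` and no keystore
password, the reload itself can be triggered with:
[source,sh]
----
curl -X POST "localhost:9200/_nodes/reload_secure_settings"
----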
[IMPORTANT]
.Binding the network host
==============================================

View File

@ -11,7 +11,7 @@ include::install_remove.asciidoc[]
==== Azure Repository
To enable Azure repositories, you have first to define your azure storage settings as
{ref}/secure-settings.html[secured settings]:
{ref}/secure-settings.html[secure settings], before starting up the node:
[source,sh]
----------------------------------------------------------------
@ -20,6 +20,7 @@ bin/elasticsearch-keystore add azure.client.default.key
----------------------------------------------------------------
Where `account` is the azure account name and `key` the azure secret key.
These settings are used by the repository's internal azure client.
Note that you can also define more than one account:
@ -31,7 +32,18 @@ bin/elasticsearch-keystore add azure.client.secondary.account
bin/elasticsearch-keystore add azure.client.secondary.key
----------------------------------------------------------------
`default` is the default account name which will be used by a repository unless you set an explicit one.
`default` is the default account name which will be used by a repository,
unless you set an explicit one in the
<<repository-azure-repository-settings, repository settings>>.
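For example (a sketch; the repository name is arbitrary and the `client`
repository setting is assumed from the linked repository settings section), a
repository using the `secondary` account could be registered with:
[source,sh]
----
curl -X PUT "localhost:9200/_snapshot/my_azure_repo" -H 'Content-Type: application/json' -d'
{
  "type": "azure",
  "settings": {
    "client": "secondary"
  }
}'
----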
Both `account` and `key` storage settings are
{ref}/secure-settings.html#reloadable-secure-settings[reloadable]. After you
reload the settings, the internal azure clients, which are used to transfer the
snapshot, will utilize the latest settings from the keystore.
NOTE: In-progress snapshot/restore jobs will not be preempted by a *reload*
of the storage secure settings. They will complete using the client as it was built
when the operation started.
You can set the client side timeout to use when making any single request. It can be defined globally, per account or both.
It's not set by default, which means that Elasticsearch is using the

View File

@ -112,6 +112,15 @@ PUT _snapshot/my_gcs_repository
// CONSOLE
// TEST[skip:we don't have gcs setup while testing this]
The `credentials_file` settings are {ref}/secure-settings.html#reloadable-secure-settings[reloadable].
After you reload the settings, the internal `gcs` clients, used to transfer the
snapshot contents, will utilize the latest settings from the keystore.
NOTE: In-progress snapshot/restore jobs will not be preempted by a *reload*
of the client's `credentials_file` settings. They will complete using the client
as it was built when the operation started.
[[repository-gcs-client]]
==== Client Settings

View File

@ -35,9 +35,9 @@ PUT _snapshot/my_s3_repository
==== Client Settings
The client used to connect to S3 has a number of settings available. Client setting names are of
the form `s3.client.CLIENT_NAME.SETTING_NAME` and specified inside `elasticsearch.yml`. The
default client name looked up by a `s3` repository is called `default`, but can be customized
with the repository setting `client`. For example:
the form `s3.client.CLIENT_NAME.SETTING_NAME`. The default client name, which is looked up by
an `s3` repository, is called `default`. It can be modified using the
<<repository-s3-repository, repository setting>> `client`. For example:
[source,js]
----
@ -53,8 +53,11 @@ PUT _snapshot/my_s3_repository
// CONSOLE
// TEST[skip:we don't have s3 setup while testing this]
Some settings are sensitive and must be stored in the {ref}/secure-settings.html[elasticsearch keystore].
For example, to use explicit AWS access keys:
Most client settings are specified inside `elasticsearch.yml`, but some are
sensitive and must be stored in the {ref}/secure-settings.html[elasticsearch keystore].
For example, before you start the node, run these commands to add AWS access
key settings to the keystore:
[source,sh]
----
@ -62,8 +65,19 @@ bin/elasticsearch-keystore add s3.client.default.access_key
bin/elasticsearch-keystore add s3.client.default.secret_key
----
The following are the available client settings. Those that must be stored in the keystore
are marked as `Secure`.
*All* client secure settings of this plugin are
{ref}/secure-settings.html#reloadable-secure-settings[reloadable]. After you
reload the settings, the internal `s3` clients, used to transfer the snapshot
contents, will utilize the latest settings from the keystore. Any existing `s3`
repositories, as well as any newly created ones, will pick up the new values
stored in the keystore.
NOTE: In-progress snapshot/restore tasks will not be preempted by a *reload*
of the client's secure settings. The task will complete using the client as it
was built when the operation started.
The following is the list of all the available client settings.
Those that must be stored in the keystore are marked as `Secure` and are *reloadable*.
`access_key`::

View File

@ -425,6 +425,7 @@ POST /sales/_search?size=0
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
// TEST[warning:The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true to use the java time api for date field doc values]
Response:

View File

@ -75,3 +75,34 @@ To remove a setting from the keystore, use the `remove` command:
bin/elasticsearch-keystore remove the.setting.name.to.remove
----------------------------------------------------------------
[float]
[[reloadable-secure-settings]]
=== Reloadable secure settings
Just like the settings values in `elasticsearch.yml`, changes to the
keystore contents are not automatically applied to the running
Elasticsearch node. Re-reading settings requires a node restart.
However, certain secure settings are marked as *reloadable*. Such settings
can be re-read and applied on a running node.
The values of all secure settings, *reloadable* or not, must be identical
across all cluster nodes. After making the desired secure settings changes
using the `bin/elasticsearch-keystore add` command, call:
[source,js]
----
POST _nodes/reload_secure_settings
{
"secure_settings_password": ""
}
----
// CONSOLE
This API will decrypt and re-read the entire keystore on every cluster node,
but only the *reloadable* secure settings will be applied. Changes to other
settings will not take effect until the next restart. Once the call returns,
the reload has completed, meaning that all internal data structures that depend
on these settings have been updated. Everything should look as if the settings
had the new value from the start.
When changing multiple *reloadable* secure settings, modify all of them on
each cluster node, and then issue a single `reload_secure_settings` call,
rather than reloading after each modification.

View File

@ -17,8 +17,6 @@
* under the License.
*/
esplugin {
description 'An easy, safe and fast scripting language for Elasticsearch'
classname 'org.elasticsearch.painless.PainlessPlugin'
@ -26,6 +24,7 @@ esplugin {
integTestCluster {
module project.project(':modules:mapper-extras')
systemProperty 'es.scripting.use_java_time', 'true'
}
dependencies {

View File

@ -238,7 +238,7 @@ public final class Def {
int numArguments = callSiteType.parameterCount();
// simple case: no lambdas
if (recipeString.isEmpty()) {
return lookupMethodInternal(painlessLookup, receiverClass, name, numArguments - 1).handle;
return lookupMethodInternal(painlessLookup, receiverClass, name, numArguments - 1).methodHandle;
}
// convert recipe string to a bitset for convenience (the code below should be refactored...)
@ -262,7 +262,7 @@ public final class Def {
// lookup the method with the proper arity, then we know everything (e.g. interface types of parameters).
// based on these we can finally link any remaining lambdas that were deferred.
PainlessMethod method = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
MethodHandle handle = method.handle;
MethodHandle handle = method.methodHandle;
int replaced = 0;
upTo = 1;
@ -281,7 +281,7 @@ public final class Def {
captures[capture] = callSiteType.parameterType(i + 1 + capture);
}
MethodHandle filter;
Class<?> interfaceType = method.arguments.get(i - 1 - replaced);
Class<?> interfaceType = method.typeParameters.get(i - 1 - replaced);
if (signature.charAt(0) == 'S') {
// the implementation is strongly typed, now that we know the interface type,
// we have everything.
@ -331,10 +331,11 @@ public final class Def {
if (interfaceMethod == null) {
throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
}
int arity = interfaceMethod.arguments.size();
int arity = interfaceMethod.typeParameters.size();
PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType,
PainlessLookupUtility.typeToCanonicalTypeName(implMethod.target), implMethod.name, receiverClass);
PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass),
implMethod.javaMethod.getName(), receiverClass);
}
/** Returns a method handle to an implementation of clazz, given method reference signature. */
@ -349,7 +350,7 @@ public final class Def {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface");
}
int arity = interfaceMethod.arguments.size() + captures.length;
int arity = interfaceMethod.typeParameters.size() + captures.length;
final MethodHandle handle;
try {
MethodHandle accessor = methodHandlesLookup.findStaticGetter(methodHandlesLookup.lookupClass(),
@ -360,7 +361,7 @@ public final class Def {
// is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail
// because the arity does not match the expected interface type.
if (call.contains("$")) {
throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name +
throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() +
"] in [" + clazz + "]");
}
throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");

View File

@ -19,6 +19,7 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessConstructor;
import org.elasticsearch.painless.lookup.PainlessLookup;
@ -108,24 +109,24 @@ public class FunctionRef {
Constructor<?> javaConstructor = delegateConstructor.javaConstructor;
MethodType delegateMethodType = delegateConstructor.methodType;
interfaceMethodName = interfaceMethod.name;
factoryMethodType = MethodType.methodType(expected,
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
delegateClassName = javaConstructor.getDeclaringClass().getName();
isDelegateInterface = false;
delegateInvokeType = H_NEWINVOKESPECIAL;
delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME;
this.delegateClassName = javaConstructor.getDeclaringClass().getName();
this.isDelegateInterface = false;
this.delegateInvokeType = H_NEWINVOKESPECIAL;
this.delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME;
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.interfaceMethod = interfaceMethod;
delegateTypeParameters = delegateConstructor.typeParameters;
delegateReturnType = void.class;
this.delegateTypeParameters = delegateConstructor.typeParameters;
this.delegateReturnType = void.class;
factoryDescriptor = factoryMethodType.toMethodDescriptorString();
interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
this.factoryDescriptor = factoryMethodType.toMethodDescriptorString();
this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
}
/**
@ -138,41 +139,63 @@ public class FunctionRef {
public FunctionRef(Class<?> expected, PainlessMethod interfaceMethod, PainlessMethod delegateMethod, int numCaptures) {
MethodType delegateMethodType = delegateMethod.methodType;
interfaceMethodName = interfaceMethod.name;
factoryMethodType = MethodType.methodType(expected,
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
// the Painless$Script class can be inferred if owner is null
if (delegateMethod.target == null) {
delegateClassName = CLASS_NAME;
isDelegateInterface = false;
} else if (delegateMethod.augmentation != null) {
delegateClassName = delegateMethod.augmentation.getName();
isDelegateInterface = delegateMethod.augmentation.isInterface();
this.delegateClassName = delegateMethod.javaMethod.getDeclaringClass().getName();
this.isDelegateInterface = delegateMethod.javaMethod.getDeclaringClass().isInterface();
if (Modifier.isStatic(delegateMethod.javaMethod.getModifiers())) {
this.delegateInvokeType = H_INVOKESTATIC;
} else if (delegateMethod.javaMethod.getDeclaringClass().isInterface()) {
this.delegateInvokeType = H_INVOKEINTERFACE;
} else {
delegateClassName = delegateMethod.target.getName();
isDelegateInterface = delegateMethod.target.isInterface();
this.delegateInvokeType = H_INVOKEVIRTUAL;
}
if (Modifier.isStatic(delegateMethod.modifiers)) {
delegateInvokeType = H_INVOKESTATIC;
} else if (delegateMethod.target.isInterface()) {
delegateInvokeType = H_INVOKEINTERFACE;
} else {
delegateInvokeType = H_INVOKEVIRTUAL;
}
delegateMethodName = delegateMethod.name;
this.delegateMethodName = delegateMethod.javaMethod.getName();
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.interfaceMethod = interfaceMethod;
delegateTypeParameters = delegateMethod.arguments;
delegateReturnType = delegateMethod.rtn;
this.delegateTypeParameters = delegateMethod.typeParameters;
this.delegateReturnType = delegateMethod.returnType;
factoryDescriptor = factoryMethodType.toMethodDescriptorString();
interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
this.factoryDescriptor = factoryMethodType.toMethodDescriptorString();
this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
}
/**
* Creates a new FunctionRef (already resolved)
* @param expected functional interface type to implement
* @param interfaceMethod functional interface method
* @param delegateMethod implementation method
* @param numCaptures number of captured arguments
*/
public FunctionRef(Class<?> expected, PainlessMethod interfaceMethod, LocalMethod delegateMethod, int numCaptures) {
MethodType delegateMethodType = delegateMethod.methodType;
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.delegateClassName = CLASS_NAME;
this.isDelegateInterface = false;
this.delegateInvokeType = H_INVOKESTATIC;
this.delegateMethodName = delegateMethod.name;
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
this.interfaceMethod = interfaceMethod;
this.delegateTypeParameters = delegateMethod.typeParameters;
this.delegateReturnType = delegateMethod.returnType;
this.factoryDescriptor = factoryMethodType.toMethodDescriptorString();
this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString());
this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString());
}
/**
@ -181,24 +204,24 @@ public class FunctionRef {
*/
public FunctionRef(Class<?> expected,
PainlessMethod interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) {
interfaceMethodName = interfaceMethod.name;
factoryMethodType = MethodType.methodType(expected,
this.interfaceMethodName = interfaceMethod.javaMethod.getName();
this.factoryMethodType = MethodType.methodType(expected,
delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1);
delegateClassName = CLASS_NAME;
delegateInvokeType = H_INVOKESTATIC;
this.delegateClassName = CLASS_NAME;
this.delegateInvokeType = H_INVOKESTATIC;
this.delegateMethodName = delegateMethodName;
this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures);
isDelegateInterface = false;
this.isDelegateInterface = false;
this.interfaceMethod = null;
delegateTypeParameters = null;
delegateReturnType = null;
this.delegateTypeParameters = null;
this.delegateReturnType = null;
factoryDescriptor = null;
interfaceType = null;
delegateType = null;
this.factoryDescriptor = null;
this.interfaceType = null;
this.delegateType = null;
}
/**
@ -215,7 +238,7 @@ public class FunctionRef {
// lookup requested constructor
PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type));
PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.arguments.size()));
PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.typeParameters.size()));
if (impl == null) {
throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]");
@ -242,16 +265,16 @@ public class FunctionRef {
final PainlessMethod impl;
// look for a static impl first
PainlessMethod staticImpl =
struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size()));
struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.typeParameters.size()));
if (staticImpl == null) {
// otherwise a virtual impl
final int arity;
if (receiverCaptured) {
// receiver captured
arity = method.arguments.size();
arity = method.typeParameters.size();
} else {
// receiver passed
arity = method.arguments.size() - 1;
arity = method.typeParameters.size() - 1;
}
impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity));
} else {

View File

@ -22,8 +22,8 @@ package org.elasticsearch.painless;
import org.elasticsearch.painless.ScriptClassInfo.MethodArgument;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import java.lang.invoke.MethodType;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@ -38,6 +38,30 @@ import java.util.Set;
*/
public final class Locals {
/**
* Constructs a local method key used to lookup local methods from a painless class.
*/
public static String buildLocalMethodKey(String methodName, int methodArity) {
return methodName + "/" + methodArity;
}
/**
* Stores information about methods directly callable on the generated script class.
*/
public static class LocalMethod {
public final String name;
public final Class<?> returnType;
public final List<Class<?>> typeParameters;
public final MethodType methodType;
public LocalMethod(String name, Class<?> returnType, List<Class<?>> typeParameters, MethodType methodType) {
this.name = name;
this.returnType = returnType;
this.typeParameters = typeParameters;
this.methodType = methodType;
}
}
/** Reserved word: loop counter */
public static final String LOOP = "#loop";
/** Reserved word: unused */
@ -110,9 +134,9 @@ public final class Locals {
}
/** Creates a new program scope: the list of methods. It is the parent for all methods */
public static Locals newProgramScope(PainlessLookup painlessLookup, Collection<PainlessMethod> methods) {
public static Locals newProgramScope(PainlessLookup painlessLookup, Collection<LocalMethod> methods) {
Locals locals = new Locals(null, painlessLookup, null, null);
for (PainlessMethod method : methods) {
for (LocalMethod method : methods) {
locals.addMethod(method);
}
return locals;
@ -143,8 +167,8 @@ public final class Locals {
}
/** Looks up a method. Returns null if the method does not exist. */
public PainlessMethod getMethod(String key) {
PainlessMethod method = lookupMethod(key);
public LocalMethod getMethod(String key) {
LocalMethod method = lookupMethod(key);
if (method != null) {
return method;
}
@ -199,7 +223,7 @@ public final class Locals {
// variable name -> variable
private Map<String,Variable> variables;
// method name+arity -> methods
private Map<String,PainlessMethod> methods;
private Map<String,LocalMethod> methods;
/**
* Create a new Locals
@ -237,7 +261,7 @@ public final class Locals {
}
/** Looks up a method at this scope only. Returns null if the method does not exist. */
private PainlessMethod lookupMethod(String key) {
private LocalMethod lookupMethod(String key) {
if (methods == null) {
return null;
}
@ -256,11 +280,11 @@ public final class Locals {
return variable;
}
private void addMethod(PainlessMethod method) {
private void addMethod(LocalMethod method) {
if (methods == null) {
methods = new HashMap<>();
}
methods.put(PainlessLookupUtility.buildPainlessMethodKey(method.name, method.arguments.size()), method);
methods.put(buildLocalMethodKey(method.name, method.typeParameters.size()), method);
// TODO: check result
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.def;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Label;
@ -28,6 +29,7 @@ import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
@ -415,4 +417,26 @@ public final class MethodWriter extends GeneratorAdapter {
System.arraycopy(params, 0, args, 2, params.length);
invokeDynamic(name, methodType.getDescriptor(), DEF_BOOTSTRAP_HANDLE, args);
}
public void invokeMethodCall(PainlessMethod painlessMethod) {
Type type = Type.getType(painlessMethod.javaMethod.getDeclaringClass());
Method method = Method.getMethod(painlessMethod.javaMethod);
if (Modifier.isStatic(painlessMethod.javaMethod.getModifiers())) {
// invokeStatic assumes that the owner class is not an interface, so this is a
// special case for interfaces where the interface method boolean needs to be set to
// true to reference the appropriate class constant when calling a static interface
// method since java 8 did not check, but java 9 and 10 do
if (painlessMethod.javaMethod.getDeclaringClass().isInterface()) {
visitMethodInsn(Opcodes.INVOKESTATIC, type.getInternalName(),
painlessMethod.javaMethod.getName(), painlessMethod.methodType.toMethodDescriptorString(), true);
} else {
invokeStatic(type, method);
}
} else if (painlessMethod.javaMethod.getDeclaringClass().isInterface()) {
invokeInterface(type, method);
} else {
invokeVirtual(type, method);
}
}
}

View File

@ -20,24 +20,20 @@
package org.elasticsearch.painless.lookup;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Field;
public final class PainlessField {
public final String name;
public final Class<?> target;
public final Class<?> clazz;
public final String javaName;
public final int modifiers;
public final MethodHandle getter;
public final MethodHandle setter;
public final Field javaField;
public final Class<?> typeParameter;
PainlessField(String name, String javaName, Class<?> target, Class<?> clazz, int modifiers,
MethodHandle getter, MethodHandle setter) {
this.name = name;
this.javaName = javaName;
this.target = target;
this.clazz = clazz;
this.modifiers = modifiers;
this.getter = getter;
this.setter = setter;
public final MethodHandle getterMethodHandle;
public final MethodHandle setterMethodHandle;
PainlessField(Field javaField, Class<?> typeParameter, MethodHandle getterMethodHandle, MethodHandle setterMethodHandle) {
this.javaField = javaField;
this.typeParameter = typeParameter;
this.getterMethodHandle = getterMethodHandle;
this.setterMethodHandle = setterMethodHandle;
}
}

View File

@ -540,7 +540,6 @@ public final class PainlessLookupBuilder {
PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey);
if (painlessMethod == null) {
org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
MethodHandle methodHandle;
try {
@ -554,19 +553,17 @@ public final class PainlessLookupBuilder {
painlessMethod = painlessMethodCache.computeIfAbsent(
new PainlessMethodCacheKey(targetClass, methodName, typeParameters),
key -> new PainlessMethod(methodName, targetClass, null, returnType,
typeParameters, asmMethod, javaMethod.getModifiers(), methodHandle, methodType));
key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));
painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod);
} else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType &&
painlessMethod.arguments.equals(typeParameters)) == false) {
} else if (painlessMethod.returnType == returnType && painlessMethod.typeParameters.equals(typeParameters) == false) {
throw new IllegalArgumentException("cannot have static methods " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(returnType) + "], " +
typesToCanonicalTypeNames(typeParameters) + "] and " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " +
typesToCanonicalTypeNames(painlessMethod.arguments) + "] " +
"[" + typeToCanonicalTypeName(painlessMethod.returnType) + "], " +
typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " +
"with the same arity and different return type or type parameters");
}
} else {
@ -597,19 +594,17 @@ public final class PainlessLookupBuilder {
painlessMethod = painlessMethodCache.computeIfAbsent(
new PainlessMethodCacheKey(targetClass, methodName, typeParameters),
key -> new PainlessMethod(methodName, targetClass, augmentedClass, returnType,
typeParameters, asmMethod, javaMethod.getModifiers(), methodHandle, methodType));
key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));
painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod);
} else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType &&
painlessMethod.arguments.equals(typeParameters)) == false) {
} else if (painlessMethod.returnType == returnType && painlessMethod.typeParameters.equals(typeParameters) == false) {
throw new IllegalArgumentException("cannot have methods " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(returnType) + "], " +
typesToCanonicalTypeNames(typeParameters) + "] and " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " +
typesToCanonicalTypeNames(painlessMethod.arguments) + "] " +
"[" + typeToCanonicalTypeName(painlessMethod.returnType) + "], " +
typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " +
"with the same arity and different return type or type parameters");
}
}
@ -684,31 +679,6 @@ public final class PainlessLookupBuilder {
"for field [[" + targetCanonicalClassName + "], [" + fieldName + "]");
}
String painlessFieldKey = buildPainlessFieldKey(fieldName);
if (Modifier.isStatic(javaField.getModifiers())) {
if (Modifier.isFinal(javaField.getModifiers()) == false) {
throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "]. [" + fieldName + "]] must be final");
}
PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey);
if (painlessField == null) {
painlessField = painlessFieldCache.computeIfAbsent(
new PainlessFieldCacheKey(targetClass, fieldName, typeParameter),
key -> new PainlessField(fieldName, javaField.getName(), targetClass,
typeParameter, javaField.getModifiers(), null, null));
painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField);
} else if (painlessField.clazz != typeParameter) {
throw new IllegalArgumentException("cannot have static fields " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
typeToCanonicalTypeName(typeParameter) + "] and " +
"[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " +
typeToCanonicalTypeName(painlessField.clazz) + "] " +
"with the same and different type parameters");
}
} else {
MethodHandle methodHandleGetter;
try {
@ -718,6 +688,30 @@ public final class PainlessLookupBuilder {
"getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]");
}
String painlessFieldKey = buildPainlessFieldKey(fieldName);
if (Modifier.isStatic(javaField.getModifiers())) {
if (Modifier.isFinal(javaField.getModifiers()) == false) {
throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final");
}
PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey);
if (painlessField == null) {
painlessField = painlessFieldCache.computeIfAbsent(
new PainlessFieldCacheKey(targetClass, fieldName, typeParameter),
key -> new PainlessField(javaField, typeParameter, methodHandleGetter, null));
painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField);
} else if (painlessField.typeParameter != typeParameter) {
throw new IllegalArgumentException("cannot have static fields " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
typeToCanonicalTypeName(typeParameter) + "] and " +
"[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " +
typeToCanonicalTypeName(painlessField.typeParameter) + "] " +
"with the same name and different type parameters");
}
} else {
MethodHandle methodHandleSetter;
try {
@ -732,17 +726,16 @@ public final class PainlessLookupBuilder {
if (painlessField == null) {
painlessField = painlessFieldCache.computeIfAbsent(
new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter),
key -> new PainlessField(fieldName, javaField.getName(), targetClass,
typeParameter, javaField.getModifiers(), methodHandleGetter, methodHandleSetter));
key -> new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter));
painlessClassBuilder.fields.put(fieldName, painlessField);
} else if (painlessField.clazz != typeParameter) {
} else if (painlessField.typeParameter != typeParameter) {
throw new IllegalArgumentException("cannot have fields " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
typeToCanonicalTypeName(typeParameter) + "] and " +
"[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " +
typeToCanonicalTypeName(painlessField.clazz) + "] " +
"with the same and different type parameters");
"[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " +
typeToCanonicalTypeName(painlessField.typeParameter) + "] " +
"with the same name and different type parameters");
}
}
}
@ -806,8 +799,8 @@ public final class PainlessLookupBuilder {
PainlessMethod newPainlessMethod = painlessMethodEntry.getValue();
PainlessMethod existingPainlessMethod = targetPainlessClassBuilder.methods.get(painlessMethodKey);
if (existingPainlessMethod == null || existingPainlessMethod.target != newPainlessMethod.target &&
existingPainlessMethod.target.isAssignableFrom(newPainlessMethod.target)) {
if (existingPainlessMethod == null || existingPainlessMethod.targetClass != newPainlessMethod.targetClass &&
existingPainlessMethod.targetClass.isAssignableFrom(newPainlessMethod.targetClass)) {
targetPainlessClassBuilder.methods.put(painlessMethodKey, newPainlessMethod);
}
}
@ -817,8 +810,9 @@ public final class PainlessLookupBuilder {
PainlessField newPainlessField = painlessFieldEntry.getValue();
PainlessField existingPainlessField = targetPainlessClassBuilder.fields.get(painlessFieldKey);
if (existingPainlessField == null || existingPainlessField.target != newPainlessField.target &&
existingPainlessField.target.isAssignableFrom(newPainlessField.target)) {
if (existingPainlessField == null ||
existingPainlessField.javaField.getDeclaringClass() != newPainlessField.javaField.getDeclaringClass() &&
existingPainlessField.javaField.getDeclaringClass().isAssignableFrom(newPainlessField.javaField.getDeclaringClass())) {
targetPainlessClassBuilder.fields.put(painlessFieldKey, newPainlessField);
}
}
@ -832,27 +826,27 @@ public final class PainlessLookupBuilder {
private void cacheRuntimeHandles(PainlessClassBuilder painlessClassBuilder) {
for (PainlessMethod painlessMethod : painlessClassBuilder.methods.values()) {
String methodName = painlessMethod.name;
int typeParametersSize = painlessMethod.arguments.size();
String methodName = painlessMethod.javaMethod.getName();
int typeParametersSize = painlessMethod.typeParameters.size();
if (typeParametersSize == 0 && methodName.startsWith("get") && methodName.length() > 3 &&
Character.isUpperCase(methodName.charAt(3))) {
painlessClassBuilder.getterMethodHandles.putIfAbsent(
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle);
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.methodHandle);
} else if (typeParametersSize == 0 && methodName.startsWith("is") && methodName.length() > 2 &&
Character.isUpperCase(methodName.charAt(2))) {
painlessClassBuilder.getterMethodHandles.putIfAbsent(
Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), painlessMethod.handle);
Character.toLowerCase(methodName.charAt(2)) + methodName.substring(3), painlessMethod.methodHandle);
} else if (typeParametersSize == 1 && methodName.startsWith("set") && methodName.length() > 3 &&
Character.isUpperCase(methodName.charAt(3))) {
painlessClassBuilder.setterMethodHandles.putIfAbsent(
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.handle);
Character.toLowerCase(methodName.charAt(3)) + methodName.substring(4), painlessMethod.methodHandle);
}
}
for (PainlessField painlessField : painlessClassBuilder.fields.values()) {
painlessClassBuilder.getterMethodHandles.put(painlessField.name, painlessField.getter);
painlessClassBuilder.setterMethodHandles.put(painlessField.name, painlessField.setter);
painlessClassBuilder.getterMethodHandles.put(painlessField.javaField.getName(), painlessField.getterMethodHandle);
painlessClassBuilder.setterMethodHandles.put(painlessField.javaField.getName(), painlessField.setterMethodHandle);
}
}

View File

@ -19,67 +19,28 @@
package org.elasticsearch.painless.lookup;
import org.elasticsearch.painless.MethodWriter;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.lang.reflect.Modifier;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.List;
public class PainlessMethod {
public final String name;
public final Class<?> target;
public final Class<?> augmentation;
public final Class<?> rtn;
public final List<Class<?>> arguments;
public final org.objectweb.asm.commons.Method method;
public final int modifiers;
public final MethodHandle handle;
public final Method javaMethod;
public final Class<?> targetClass;
public final Class<?> returnType;
public final List<Class<?>> typeParameters;
public final MethodHandle methodHandle;
public final MethodType methodType;
public PainlessMethod(String name, Class<?> target, Class<?> augmentation, Class<?> rtn, List<Class<?>> arguments,
org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle, MethodType methodType) {
this.name = name;
this.augmentation = augmentation;
this.target = target;
this.rtn = rtn;
this.arguments = Collections.unmodifiableList(arguments);
this.method = method;
this.modifiers = modifiers;
this.handle = handle;
public PainlessMethod(Method javaMethod, Class<?> targetClass, Class<?> returnType, List<Class<?>> typeParameters,
MethodHandle methodHandle, MethodType methodType) {
this.javaMethod = javaMethod;
this.targetClass = targetClass;
this.returnType = returnType;
this.typeParameters = Collections.unmodifiableList(typeParameters);
this.methodHandle = methodHandle;
this.methodType = methodType;
}
public void write(MethodWriter writer) {
final org.objectweb.asm.Type type;
final Class<?> clazz;
if (augmentation != null) {
assert Modifier.isStatic(modifiers);
clazz = augmentation;
type = org.objectweb.asm.Type.getType(augmentation);
} else {
clazz = target;
type = Type.getType(target);
}
if (Modifier.isStatic(modifiers)) {
// invokeStatic assumes that the owner class is not an interface, so this is a
// special case for interfaces where the interface method boolean needs to be set to
// true to reference the appropriate class constant when calling a static interface
// method since java 8 did not check, but java 9 and 10 do
if (Modifier.isInterface(clazz.getModifiers())) {
writer.visitMethodInsn(Opcodes.INVOKESTATIC,
type.getInternalName(), name, methodType.toMethodDescriptorString(), true);
} else {
writer.invokeStatic(type, method);
}
} else if (Modifier.isInterface(clazz.getModifiers())) {
writer.invokeInterface(type, method);
} else {
writer.invokeVirtual(type, method);
}
}
}

View File

@ -21,10 +21,11 @@ package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.objectweb.asm.commons.Method;
import java.util.List;
import java.util.Objects;
@ -40,7 +41,7 @@ public final class ECallLocal extends AExpression {
private final String name;
private final List<AExpression> arguments;
private PainlessMethod method = null;
private LocalMethod method = null;
public ECallLocal(Location location, String name, List<AExpression> arguments) {
super(location);
@ -68,14 +69,14 @@ public final class ECallLocal extends AExpression {
for (int argument = 0; argument < arguments.size(); ++argument) {
AExpression expression = arguments.get(argument);
expression.expected = method.arguments.get(argument);
expression.expected = method.typeParameters.get(argument);
expression.internal = true;
expression.analyze(locals);
arguments.set(argument, expression.cast(locals));
}
statement = true;
actual = method.rtn;
actual = method.returnType;
}
@Override
@ -86,7 +87,7 @@ public final class ECallLocal extends AExpression {
argument.write(writer, globals);
}
writer.invokeStatic(CLASS_TYPE, method.method);
writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString()));
}
@Override

View File

@ -81,14 +81,14 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda
PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1);
// check casts between the interface method and the delegate method are legal
for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) {
Class<?> from = ref.interfaceMethod.arguments.get(i);
for (int i = 0; i < ref.interfaceMethod.typeParameters.size(); ++i) {
Class<?> from = ref.interfaceMethod.typeParameters.get(i);
Class<?> to = ref.delegateTypeParameters.get(i);
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (ref.interfaceMethod.rtn != void.class) {
AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.rtn, false, true);
if (ref.interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.returnType, false, true);
}
} catch (IllegalArgumentException e) {
throw createError(e);

View File

@ -23,6 +23,7 @@ import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
@ -70,8 +71,7 @@ public final class EFunctionRef extends AExpression implements ILambda {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface");
}
PainlessMethod delegateMethod =
locals.getMethod(PainlessLookupUtility.buildPainlessMethodKey(call, interfaceMethod.arguments.size()));
LocalMethod delegateMethod = locals.getMethod(Locals.buildLocalMethodKey(call, interfaceMethod.typeParameters.size()));
if (delegateMethod == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found");
@ -79,14 +79,14 @@ public final class EFunctionRef extends AExpression implements ILambda {
ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0);
// check casts between the interface method and the delegate method are legal
for (int i = 0; i < interfaceMethod.arguments.size(); ++i) {
Class<?> from = interfaceMethod.arguments.get(i);
Class<?> to = delegateMethod.arguments.get(i);
for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) {
Class<?> from = interfaceMethod.typeParameters.get(i);
Class<?> to = delegateMethod.typeParameters.get(i);
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (interfaceMethod.rtn != void.class) {
AnalyzerCaster.getLegalCast(location, delegateMethod.rtn, interfaceMethod.rtn, false, true);
if (interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, delegateMethod.returnType, interfaceMethod.returnType, false, true);
}
} else {
// whitelist lookup

View File

@ -23,6 +23,7 @@ import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.FunctionRef;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
@ -126,21 +127,21 @@ public final class ELambda extends AExpression implements ILambda {
"[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"));
}
// check arity before we manipulate parameters
if (interfaceMethod.arguments.size() != paramTypeStrs.size())
throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name +
if (interfaceMethod.typeParameters.size() != paramTypeStrs.size())
throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() +
"] in [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "]");
// for method invocation, its allowed to ignore the return value
if (interfaceMethod.rtn == void.class) {
if (interfaceMethod.returnType == void.class) {
returnType = def.class;
} else {
returnType = interfaceMethod.rtn;
returnType = interfaceMethod.returnType;
}
// replace any null types with the actual type
actualParamTypeStrs = new ArrayList<>(paramTypeStrs.size());
for (int i = 0; i < paramTypeStrs.size(); i++) {
String paramType = paramTypeStrs.get(i);
if (paramType == null) {
actualParamTypeStrs.add(PainlessLookupUtility.typeToCanonicalTypeName(interfaceMethod.arguments.get(i)));
actualParamTypeStrs.add(PainlessLookupUtility.typeToCanonicalTypeName(interfaceMethod.typeParameters.get(i)));
} else {
actualParamTypeStrs.add(paramType);
}
@ -183,20 +184,22 @@ public final class ELambda extends AExpression implements ILambda {
} else {
defPointer = null;
try {
ref = new FunctionRef(expected, interfaceMethod, desugared.method, captures.size());
LocalMethod localMethod =
new LocalMethod(desugared.name, desugared.returnType, desugared.typeParameters, desugared.methodType);
ref = new FunctionRef(expected, interfaceMethod, localMethod, captures.size());
} catch (IllegalArgumentException e) {
throw createError(e);
}
// check casts between the interface method and the delegate method are legal
for (int i = 0; i < interfaceMethod.arguments.size(); ++i) {
Class<?> from = interfaceMethod.arguments.get(i);
for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) {
Class<?> from = interfaceMethod.typeParameters.get(i);
Class<?> to = desugared.parameters.get(i + captures.size()).clazz;
AnalyzerCaster.getLegalCast(location, from, to, false, true);
}
if (interfaceMethod.rtn != void.class) {
AnalyzerCaster.getLegalCast(location, desugared.rtnType, interfaceMethod.rtn, false, true);
if (interfaceMethod.returnType != void.class) {
AnalyzerCaster.getLegalCast(location, desugared.returnType, interfaceMethod.returnType, false, true);
}
actual = expected;

View File

@ -100,7 +100,7 @@ public final class EListInit extends AExpression {
for (AExpression value : values) {
writer.dup();
value.write(writer, globals);
method.write(writer);
writer.invokeMethodCall(method);
writer.pop();
}
}

View File

@ -123,7 +123,7 @@ public final class EMapInit extends AExpression {
writer.dup();
key.write(writer, globals);
value.write(writer, globals);
method.write(writer);
writer.invokeMethodCall(method);
writer.pop();
}
}

View File

@ -56,14 +56,14 @@ final class PSubCallInvoke extends AExpression {
for (int argument = 0; argument < arguments.size(); ++argument) {
AExpression expression = arguments.get(argument);
expression.expected = method.arguments.get(argument);
expression.expected = method.typeParameters.get(argument);
expression.internal = true;
expression.analyze(locals);
arguments.set(argument, expression.cast(locals));
}
statement = true;
actual = method.rtn;
actual = method.returnType;
}
@Override
@ -78,11 +78,11 @@ final class PSubCallInvoke extends AExpression {
argument.write(writer, globals);
}
method.write(writer);
writer.invokeMethodCall(method);
}
@Override
public String toString() {
return singleLineToStringWithOptionalArgs(arguments, prefix, method.name);
return singleLineToStringWithOptionalArgs(arguments, prefix, method.javaMethod.getName());
}
}

View File

@ -51,22 +51,24 @@ final class PSubField extends AStoreable {
@Override
void analyze(Locals locals) {
if (write && Modifier.isFinal(field.modifiers)) {
throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.name + "] for type " +
"[" + PainlessLookupUtility.typeToCanonicalTypeName(field.clazz) + "]."));
if (write && Modifier.isFinal(field.javaField.getModifiers())) {
throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.javaField.getName() + "] " +
"for type [" + PainlessLookupUtility.typeToCanonicalTypeName(field.javaField.getDeclaringClass()) + "]."));
}
actual = field.clazz;
actual = field.typeParameter;
}
@Override
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isStatic(field.modifiers)) {
writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz));
if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) {
writer.getStatic(Type.getType(
field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter));
} else {
writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz));
writer.getField(Type.getType(
field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter));
}
}
@ -94,10 +96,12 @@ final class PSubField extends AStoreable {
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isStatic(field.modifiers)) {
writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz));
if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) {
writer.getStatic(Type.getType(
field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter));
} else {
writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz));
writer.getField(Type.getType(
field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter));
}
}
@ -105,15 +109,17 @@ final class PSubField extends AStoreable {
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isStatic(field.modifiers)) {
writer.putStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz));
if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) {
writer.putStatic(Type.getType(
field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter));
} else {
writer.putField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz));
writer.putField(Type.getType(
field.javaField.getDeclaringClass()), field.javaField.getName(), MethodWriter.getType(field.typeParameter));
}
}
@Override
public String toString() {
return singleLineToString(prefix, field.name);
return singleLineToString(prefix, field.javaField.getName());
}
}

View File

@ -62,17 +62,17 @@ final class PSubListShortcut extends AStoreable {
getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1));
setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("set", 2));
if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1 ||
getter.arguments.get(0) != int.class)) {
if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1 ||
getter.typeParameters.get(0) != int.class)) {
throw createError(new IllegalArgumentException("Illegal list get shortcut for type [" + canonicalClassName + "]."));
}
if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0) != int.class)) {
if (setter != null && (setter.typeParameters.size() != 2 || setter.typeParameters.get(0) != int.class)) {
throw createError(new IllegalArgumentException("Illegal list set shortcut for type [" + canonicalClassName + "]."));
}
if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0))
|| !getter.rtn.equals(setter.arguments.get(1)))) {
if (getter != null && setter != null && (!getter.typeParameters.get(0).equals(setter.typeParameters.get(0))
|| !getter.returnType.equals(setter.typeParameters.get(1)))) {
throw createError(new IllegalArgumentException("Shortcut argument types must match."));
}
@ -81,7 +81,7 @@ final class PSubListShortcut extends AStoreable {
index.analyze(locals);
index = index.cast(locals);
actual = setter != null ? setter.arguments.get(1) : getter.rtn;
actual = setter != null ? setter.typeParameters.get(1) : getter.returnType;
} else {
throw createError(new IllegalArgumentException("Illegal list shortcut for type [" + canonicalClassName + "]."));
}
@ -119,21 +119,18 @@ final class PSubListShortcut extends AStoreable {
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.invokeMethodCall(getter);
getter.write(writer);
if (getter.rtn == getter.handle.type().returnType()) {
writer.checkCast(MethodWriter.getType(getter.rtn));
if (getter.returnType == getter.javaMethod.getReturnType()) {
writer.checkCast(MethodWriter.getType(getter.returnType));
}
}
@Override
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
setter.write(writer);
writer.writePop(MethodWriter.getType(setter.rtn).getSize());
writer.invokeMethodCall(setter);
writer.writePop(MethodWriter.getType(setter.returnType).getSize());
}
@Override

View File

@ -61,25 +61,25 @@ final class PSubMapShortcut extends AStoreable {
getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1));
setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("put", 2));
if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1)) {
if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1)) {
throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + canonicalClassName + "]."));
}
if (setter != null && setter.arguments.size() != 2) {
if (setter != null && setter.typeParameters.size() != 2) {
throw createError(new IllegalArgumentException("Illegal map set shortcut for type [" + canonicalClassName + "]."));
}
if (getter != null && setter != null &&
(!getter.arguments.get(0).equals(setter.arguments.get(0)) || !getter.rtn.equals(setter.arguments.get(1)))) {
if (getter != null && setter != null && (!getter.typeParameters.get(0).equals(setter.typeParameters.get(0)) ||
!getter.returnType.equals(setter.typeParameters.get(1)))) {
throw createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((read || write) && (!read || getter != null) && (!write || setter != null)) {
index.expected = setter != null ? setter.arguments.get(0) : getter.arguments.get(0);
index.expected = setter != null ? setter.typeParameters.get(0) : getter.typeParameters.get(0);
index.analyze(locals);
index = index.cast(locals);
actual = setter != null ? setter.arguments.get(1) : getter.rtn;
actual = setter != null ? setter.typeParameters.get(1) : getter.returnType;
} else {
throw createError(new IllegalArgumentException("Illegal map shortcut for type [" + canonicalClassName + "]."));
}
@ -90,11 +90,10 @@ final class PSubMapShortcut extends AStoreable {
index.write(writer, globals);
writer.writeDebugInfo(location);
writer.invokeMethodCall(getter);
getter.write(writer);
if (getter.rtn != getter.handle.type().returnType()) {
writer.checkCast(MethodWriter.getType(getter.rtn));
if (getter.returnType != getter.javaMethod.getReturnType()) {
writer.checkCast(MethodWriter.getType(getter.returnType));
}
}
@ -121,21 +120,18 @@ final class PSubMapShortcut extends AStoreable {
@Override
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.invokeMethodCall(getter);
getter.write(writer);
if (getter.rtn != getter.handle.type().returnType()) {
writer.checkCast(MethodWriter.getType(getter.rtn));
if (getter.returnType != getter.javaMethod.getReturnType()) {
writer.checkCast(MethodWriter.getType(getter.returnType));
}
}
@Override
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
setter.write(writer);
writer.writePop(MethodWriter.getType(setter.rtn).getSize());
writer.invokeMethodCall(setter);
writer.writePop(MethodWriter.getType(setter.returnType).getSize());
}
@Override

View File

@ -53,22 +53,22 @@ final class PSubShortcut extends AStoreable {
@Override
void analyze(Locals locals) {
if (getter != null && (getter.rtn == void.class || !getter.arguments.isEmpty())) {
if (getter != null && (getter.returnType == void.class || !getter.typeParameters.isEmpty())) {
throw createError(new IllegalArgumentException(
"Illegal get shortcut on field [" + value + "] for type [" + type + "]."));
}
if (setter != null && (setter.rtn != void.class || setter.arguments.size() != 1)) {
if (setter != null && (setter.returnType != void.class || setter.typeParameters.size() != 1)) {
throw createError(new IllegalArgumentException(
"Illegal set shortcut on field [" + value + "] for type [" + type + "]."));
}
if (getter != null && setter != null && setter.arguments.get(0) != getter.rtn) {
if (getter != null && setter != null && setter.typeParameters.get(0) != getter.returnType) {
throw createError(new IllegalArgumentException("Shortcut argument types must match."));
}
if ((getter != null || setter != null) && (!read || getter != null) && (!write || setter != null)) {
actual = setter != null ? setter.arguments.get(0) : getter.rtn;
actual = setter != null ? setter.typeParameters.get(0) : getter.returnType;
} else {
throw createError(new IllegalArgumentException("Illegal shortcut on field [" + value + "] for type [" + type + "]."));
}
@ -78,10 +78,10 @@ final class PSubShortcut extends AStoreable {
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
getter.write(writer);
writer.invokeMethodCall(getter);
if (!getter.rtn.equals(getter.handle.type().returnType())) {
writer.checkCast(MethodWriter.getType(getter.rtn));
if (!getter.returnType.equals(getter.javaMethod.getReturnType())) {
writer.checkCast(MethodWriter.getType(getter.returnType));
}
}
@ -109,10 +109,10 @@ final class PSubShortcut extends AStoreable {
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
getter.write(writer);
writer.invokeMethodCall(getter);
if (getter.rtn != getter.handle.type().returnType()) {
writer.checkCast(MethodWriter.getType(getter.rtn));
if (getter.returnType != getter.javaMethod.getReturnType()) {
writer.checkCast(MethodWriter.getType(getter.returnType));
}
}
@ -120,9 +120,9 @@ final class PSubShortcut extends AStoreable {
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
setter.write(writer);
writer.invokeMethodCall(setter);
writer.writePop(MethodWriter.getType(setter.rtn).getSize());
writer.writePop(MethodWriter.getType(setter.returnType).getSize());
}
@Override

View File

@ -31,14 +31,12 @@ import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.WriterConstants;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.node.SSource.Reserved;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Opcodes;
import java.lang.invoke.MethodType;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
@ -92,9 +90,12 @@ public final class SFunction extends AStatement {
private final List<AStatement> statements;
public final boolean synthetic;
Class<?> rtnType = null;
Class<?> returnType;
List<Class<?>> typeParameters;
MethodType methodType;
org.objectweb.asm.commons.Method method;
List<Parameter> parameters = new ArrayList<>();
PainlessMethod method = null;
private Variable loop = null;
@ -120,7 +121,7 @@ public final class SFunction extends AStatement {
void generateSignature(PainlessLookup painlessLookup) {
try {
rtnType = painlessLookup.getJavaClassFromPainlessType(rtnTypeStr);
returnType = painlessLookup.getJavaClassFromPainlessType(rtnTypeStr);
} catch (IllegalArgumentException exception) {
throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "]."));
}
@ -145,11 +146,10 @@ public final class SFunction extends AStatement {
}
}
int modifiers = Modifier.STATIC | Modifier.PRIVATE;
org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method(name, MethodType.methodType(
PainlessLookupUtility.typeToJavaType(rtnType), paramClasses).toMethodDescriptorString());
MethodType methodType = MethodType.methodType(PainlessLookupUtility.typeToJavaType(rtnType), paramClasses);
this.method = new PainlessMethod(name, null, null, rtnType, paramTypes, method, modifiers, null, methodType);
typeParameters = paramTypes;
methodType = MethodType.methodType(PainlessLookupUtility.typeToJavaType(returnType), paramClasses);
method = new org.objectweb.asm.commons.Method(name, MethodType.methodType(
PainlessLookupUtility.typeToJavaType(returnType), paramClasses).toMethodDescriptorString());
}
@Override
@ -177,7 +177,7 @@ public final class SFunction extends AStatement {
allEscape = statement.allEscape;
}
if (!methodEscape && rtnType != void.class) {
if (!methodEscape && returnType != void.class) {
throw createError(new IllegalArgumentException("Not all paths provide a return value for method [" + name + "]."));
}
@ -192,7 +192,7 @@ public final class SFunction extends AStatement {
if (synthetic) {
access |= Opcodes.ACC_SYNTHETIC;
}
final MethodWriter function = new MethodWriter(access, method.method, writer, globals.getStatements(), settings);
final MethodWriter function = new MethodWriter(access, method, writer, globals.getStatements(), settings);
function.visitCode();
write(function, globals);
function.endMethod();
@ -212,7 +212,7 @@ public final class SFunction extends AStatement {
}
if (!methodEscape) {
if (rtnType == void.class) {
if (returnType == void.class) {
function.returnValue();
} else {
throw createError(new IllegalStateException("Illegal tree structure."));
@ -225,11 +225,7 @@ public final class SFunction extends AStatement {
}
private void initializeConstant(MethodWriter writer) {
final Handle handle = new Handle(Opcodes.H_INVOKESTATIC,
CLASS_TYPE.getInternalName(),
name,
method.method.getDescriptor(),
false);
final Handle handle = new Handle(Opcodes.H_INVOKESTATIC, CLASS_TYPE.getInternalName(), name, method.getDescriptor(), false);
writer.push(handle);
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Constant;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Locals.LocalMethod;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
@ -30,8 +31,6 @@ import org.elasticsearch.painless.ScriptClassInfo;
import org.elasticsearch.painless.SimpleChecksAdapter;
import org.elasticsearch.painless.WriterConstants;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.node.SFunction.FunctionReserved;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
@ -165,14 +164,15 @@ public final class SSource extends AStatement {
}
public void analyze(PainlessLookup painlessLookup) {
Map<String, PainlessMethod> methods = new HashMap<>();
Map<String, LocalMethod> methods = new HashMap<>();
for (SFunction function : functions) {
function.generateSignature(painlessLookup);
String key = PainlessLookupUtility.buildPainlessMethodKey(function.name, function.parameters.size());
String key = Locals.buildLocalMethodKey(function.name, function.parameters.size());
if (methods.put(key, function.method) != null) {
if (methods.put(key,
new LocalMethod(function.name, function.returnType, function.typeParameters, function.methodType)) != null) {
throw createError(new IllegalArgumentException("Duplicate functions with name [" + function.name + "]."));
}
}
@ -184,7 +184,7 @@ public final class SSource extends AStatement {
void analyze(Locals program) {
for (SFunction function : functions) {
Locals functionLocals =
Locals.newFunctionScope(program, function.rtnType, function.parameters, function.reserved.getMaxLoopCounter());
Locals.newFunctionScope(program, function.returnType, function.parameters, function.reserved.getMaxLoopCounter());
function.analyze(functionLocals);
}

View File

@ -99,7 +99,7 @@ final class SSubEachIterable extends AStatement {
.getMethodType(org.objectweb.asm.Type.getType(Iterator.class), org.objectweb.asm.Type.getType(Object.class));
writer.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR);
} else {
method.write(writer);
writer.invokeMethodCall(method);
}
writer.visitVarInsn(MethodWriter.getType(iterator.clazz).getOpcode(Opcodes.ISTORE), iterator.getSlot());

View File

@ -77,8 +77,8 @@ class org.elasticsearch.index.fielddata.ScriptDocValues$Longs {
}
class org.elasticsearch.index.fielddata.ScriptDocValues$Dates {
org.joda.time.ReadableDateTime get(int)
org.joda.time.ReadableDateTime getValue()
Object get(int)
Object getValue()
List getValues()
}

View File

@ -56,9 +56,9 @@ public class PainlessDocGenerator {
private static final PainlessLookup PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class);
private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.name);
private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.name);
private static final Comparator<PainlessMethod> METHOD_NUMBER_OF_PARAMS = comparing(m -> m.arguments.size());
private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.javaField.getName());
private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.javaMethod.getName());
private static final Comparator<PainlessMethod> METHOD_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size());
private static final Comparator<PainlessConstructor> CONSTRUCTOR_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size());
public static void main(String[] args) throws IOException {
@ -114,10 +114,10 @@ public class PainlessDocGenerator {
struct.constructors.values().stream().sorted(CONSTRUCTOR_NUMBER_OF_PARAMS).forEach(documentConstructor);
Map<String, Class<?>> inherited = new TreeMap<>();
struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(method -> {
if (method.target == clazz) {
if (method.targetClass == clazz) {
documentMethod(typeStream, method);
} else {
inherited.put(canonicalClassName, method.target);
inherited.put(canonicalClassName, method.targetClass);
}
});
@ -147,17 +147,17 @@ public class PainlessDocGenerator {
emitAnchor(stream, field);
stream.print("]]");
if (Modifier.isStatic(field.modifiers)) {
if (Modifier.isStatic(field.javaField.getModifiers())) {
stream.print("static ");
}
emitType(stream, field.clazz);
emitType(stream, field.typeParameter);
stream.print(' ');
String javadocRoot = javadocRoot(field);
emitJavadocLink(stream, javadocRoot, field);
stream.print('[');
stream.print(field.name);
stream.print(field.javaField.getName());
stream.print(']');
if (javadocRoot.equals("java8")) {
@ -212,11 +212,11 @@ public class PainlessDocGenerator {
emitAnchor(stream, method);
stream.print("]]");
if (null == method.augmentation && Modifier.isStatic(method.modifiers)) {
if (method.targetClass == method.javaMethod.getDeclaringClass() && Modifier.isStatic(method.javaMethod.getModifiers())) {
stream.print("static ");
}
emitType(stream, method.rtn);
emitType(stream, method.returnType);
stream.print(' ');
String javadocRoot = javadocRoot(method);
@ -227,7 +227,7 @@ public class PainlessDocGenerator {
stream.print("](");
boolean first = true;
for (Class<?> arg : method.arguments) {
for (Class<?> arg : method.typeParameters) {
if (first) {
first = false;
} else {
@ -269,20 +269,20 @@ public class PainlessDocGenerator {
* Anchor text for a {@link PainlessMethod}.
*/
private static void emitAnchor(PrintStream stream, PainlessMethod method) {
emitAnchor(stream, method.target);
emitAnchor(stream, method.targetClass);
stream.print('-');
stream.print(methodName(method));
stream.print('-');
stream.print(method.arguments.size());
stream.print(method.typeParameters.size());
}
/**
* Anchor text for a {@link PainlessField}.
*/
private static void emitAnchor(PrintStream stream, PainlessField field) {
emitAnchor(stream, field.target);
emitAnchor(stream, field.javaField.getDeclaringClass());
stream.print('-');
stream.print(field.name);
stream.print(field.javaField.getName());
}
private static String constructorName(PainlessConstructor constructor) {
@ -290,7 +290,7 @@ public class PainlessDocGenerator {
}
private static String methodName(PainlessMethod method) {
return PainlessLookupUtility.typeToCanonicalTypeName(method.target);
return PainlessLookupUtility.typeToCanonicalTypeName(method.targetClass);
}
/**
@ -359,16 +359,16 @@ public class PainlessDocGenerator {
stream.print("link:{");
stream.print(root);
stream.print("-javadoc}/");
stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.target));
stream.print(classUrlPath(method.javaMethod.getDeclaringClass()));
stream.print(".html#");
stream.print(methodName(method));
stream.print("%2D");
boolean first = true;
if (method.augmentation != null) {
if (method.targetClass != method.javaMethod.getDeclaringClass()) {
first = false;
stream.print(method.target.getName());
stream.print(method.javaMethod.getDeclaringClass().getName());
}
for (Class<?> clazz: method.arguments) {
for (Class<?> clazz: method.typeParameters) {
if (first) {
first = false;
} else {
@ -391,26 +391,26 @@ public class PainlessDocGenerator {
stream.print("link:{");
stream.print(root);
stream.print("-javadoc}/");
stream.print(classUrlPath(field.target));
stream.print(classUrlPath(field.javaField.getDeclaringClass()));
stream.print(".html#");
stream.print(field.javaName);
stream.print(field.javaField.getName());
}
/**
* Pick the javadoc root for a {@link PainlessMethod}.
*/
private static String javadocRoot(PainlessMethod method) {
if (method.augmentation != null) {
if (method.targetClass != method.javaMethod.getDeclaringClass()) {
return "painless";
}
return javadocRoot(method.target);
return javadocRoot(method.targetClass);
}
/**
* Pick the javadoc root for a {@link PainlessField}.
*/
private static String javadocRoot(PainlessField field) {
return javadocRoot(field.target);
return javadocRoot(field.javaField.getDeclaringClass());
}
/**

View File

@ -108,7 +108,7 @@ setup:
script_fields:
bar:
script:
source: "doc.date.value.dayOfWeek"
source: "doc.date.value.dayOfWeek.value"
- match: { hits.hits.0.fields.bar.0: 7}
@ -123,7 +123,7 @@ setup:
source: >
StringBuilder b = new StringBuilder();
for (def date : doc.dates) {
b.append(" ").append(date.getDayOfWeek());
b.append(" ").append(date.getDayOfWeek().value);
}
return b.toString().trim()

View File

@ -95,7 +95,7 @@ setup:
field:
script:
source: "doc.date.get(0)"
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12Z' }
- do:
search:
@ -104,7 +104,7 @@ setup:
field:
script:
source: "doc.date.value"
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12.000Z' }
- match: { hits.hits.0.fields.field.0: '2017-01-01T12:11:12Z' }
---
"geo_point":

View File

@ -34,13 +34,13 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr
dependencies {
// network stack
compile "io.netty:netty-buffer:4.1.16.Final"
compile "io.netty:netty-codec:4.1.16.Final"
compile "io.netty:netty-codec-http:4.1.16.Final"
compile "io.netty:netty-common:4.1.16.Final"
compile "io.netty:netty-handler:4.1.16.Final"
compile "io.netty:netty-resolver:4.1.16.Final"
compile "io.netty:netty-transport:4.1.16.Final"
compile "io.netty:netty-buffer:4.1.28.Final"
compile "io.netty:netty-codec:4.1.28.Final"
compile "io.netty:netty-codec-http:4.1.28.Final"
compile "io.netty:netty-common:4.1.28.Final"
compile "io.netty:netty-handler:4.1.28.Final"
compile "io.netty:netty-resolver:4.1.28.Final"
compile "io.netty:netty-transport:4.1.28.Final"
}
dependencyLicenses {
@ -134,7 +134,6 @@ thirdPartyAudit.excludes = [
'net.jpountz.xxhash.StreamingXXHash32',
'net.jpountz.xxhash.XXHashFactory',
'io.netty.internal.tcnative.CertificateRequestedCallback',
'io.netty.internal.tcnative.CertificateRequestedCallback$KeyMaterial',
'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher',
@ -161,6 +160,6 @@ thirdPartyAudit.excludes = [
'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt$Engines',
'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener'
]

View File

@ -1 +0,0 @@
63b5fa95c74785e16f2c30ce268bc222e35c8cb5

View File

@ -0,0 +1 @@
d6c2d13492778009d33f60e05ed90bcb535d1fd1

View File

@ -1 +0,0 @@
d84a1f21768b7309c2954521cf5a1f46c2309eb1

View File

@ -0,0 +1 @@
a38361d893900947524f8a9da980555950e73d6a

View File

@ -1 +0,0 @@
d64312378b438dfdad84267c599a053327c6f02a

View File

@ -0,0 +1 @@
897100c1022c780b0a436b9349e507e8fa9800dc

View File

@ -1 +0,0 @@
177a6b30cca92f6f5f9873c9befd681377a4c328

View File

@ -0,0 +1 @@
df69ce8bb9b544a71e7bbee290253cf7c93e6bad

View File

@ -1 +0,0 @@
fec0e63e7dd7f4eeef7ea8dc47a1ff32dfc7ebc2

View File

@ -0,0 +1 @@
a035784682da0126bc25f10713dac732b5082a6d

View File

@ -1 +0,0 @@
f6eb553b53fb3a90a8ac1170697093fed82eae28

View File

@ -0,0 +1 @@
f33557dcb31fa20da075ac05e4808115e32ef9b7

View File

@ -1 +0,0 @@
3c8ee2c4d4a1cbb947a5c184c7aeb2204260958b

View File

@ -0,0 +1 @@
d2ef28f49d726737f0ffe84bf66529b3bf6e0c0d

View File

@ -23,6 +23,9 @@ grant codeBase "${codebase.netty-common}" {
// netty makes and accepts socket connections
permission java.net.SocketPermission "*", "accept,connect";
// Netty sets custom classloader for some of its internal threads
permission java.lang.RuntimePermission "*", "setContextClassLoader";
};
grant codeBase "${codebase.netty-transport}" {

View File

@ -20,6 +20,7 @@
package org.elasticsearch.http.netty4;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.util.ReferenceCounted;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
@ -92,6 +93,7 @@ public class Netty4BadRequestTests extends ESTestCase {
try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
final Collection<FullHttpResponse> responses =
nettyHttpClient.get(transportAddress.address(), "/_cluster/settings?pretty=%");
try {
assertThat(responses, hasSize(1));
assertThat(responses.iterator().next().status().code(), equalTo(400));
final Collection<String> responseBodies = Netty4HttpClient.returnHttpResponseBodies(responses);
@ -101,6 +103,9 @@ public class Netty4BadRequestTests extends ESTestCase {
responseBodies.iterator().next(),
containsString(
"\"reason\":\"java.lang.IllegalArgumentException: unterminated escape sequence at end of string: %\""));
} finally {
responses.forEach(ReferenceCounted::release);
}
}
}
}
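The change above, and the same pattern repeated in the Netty test files that follow, wraps response handling in try/finally and releases the reference-counted responses. A minimal standalone sketch of the pattern, assuming Netty 4.1 on the classpath (the class and variable names below are illustrative, not taken from the diff):

```
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.util.ReferenceCounted;

import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;

public class ReleasePatternSketch {
    public static void main(String[] args) {
        // Stand-in for the responses a test HTTP client would return.
        Collection<FullHttpResponse> responses = Collections.singletonList(
            new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK,
                Unpooled.copiedBuffer("ok".getBytes(StandardCharsets.UTF_8))));
        try {
            // ... assertions against the responses go here ...
            System.out.println(responses.iterator().next().status());
        } finally {
            // FullHttpResponse is reference counted; releasing it prevents buffer leaks.
            responses.forEach(ReferenceCounted::release);
        }
    }
}
```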

View File

@ -20,6 +20,7 @@
package org.elasticsearch.http.netty4;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.util.ReferenceCounted;
import org.elasticsearch.ESNetty4IntegTestCase;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
@ -88,12 +89,20 @@ public class Netty4HttpRequestSizeLimitIT extends ESNetty4IntegTestCase {
try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
Collection<FullHttpResponse> singleResponse = nettyHttpClient.post(transportAddress.address(), requests[0]);
try {
assertThat(singleResponse, hasSize(1));
assertAtLeastOnceExpectedStatus(singleResponse, HttpResponseStatus.OK);
Collection<FullHttpResponse> multipleResponses = nettyHttpClient.post(transportAddress.address(), requests);
try {
assertThat(multipleResponses, hasSize(requests.length));
assertAtLeastOnceExpectedStatus(multipleResponses, HttpResponseStatus.SERVICE_UNAVAILABLE);
} finally {
multipleResponses.forEach(ReferenceCounted::release);
}
} finally {
singleResponse.forEach(ReferenceCounted::release);
}
}
}
@ -113,8 +122,12 @@ public class Netty4HttpRequestSizeLimitIT extends ESNetty4IntegTestCase {
try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
Collection<FullHttpResponse> responses = nettyHttpClient.put(transportAddress.address(), requestUris);
try {
assertThat(responses, hasSize(requestUris.length));
assertAllInExpectedStatus(responses, HttpResponseStatus.OK);
} finally {
responses.forEach(ReferenceCounted::release);
}
}
}

View File

@ -29,6 +29,7 @@ import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.util.ReferenceCounted;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
@ -98,8 +99,12 @@ public class Netty4HttpServerPipeliningTests extends ESTestCase {
try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
Collection<FullHttpResponse> responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{}));
try {
Collection<String> responseBodies = Netty4HttpClient.returnHttpResponseBodies(responses);
assertThat(responseBodies, contains(requests.toArray()));
} finally {
responses.forEach(ReferenceCounted::release);
}
}
}
}

View File

@ -207,14 +207,23 @@ public class Netty4HttpServerTransportTests extends ESTestCase {
HttpUtil.setContentLength(request, contentLength);
final FullHttpResponse response = client.post(remoteAddress.address(), request);
try {
assertThat(response.status(), equalTo(expectedStatus));
if (expectedStatus.equals(HttpResponseStatus.CONTINUE)) {
final FullHttpRequest continuationRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", Unpooled.EMPTY_BUFFER);
final FullHttpResponse continuationResponse = client.post(remoteAddress.address(), continuationRequest);
try {
assertThat(continuationResponse.status(), is(HttpResponseStatus.OK));
assertThat(new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8), is("done"));
assertThat(
new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8), is("done")
);
} finally {
continuationResponse.release();
}
}
} finally {
response.release();
}
}
}
@ -280,10 +289,14 @@ public class Netty4HttpServerTransportTests extends ESTestCase {
final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, url);
final FullHttpResponse response = client.post(remoteAddress.address(), request);
try {
assertThat(response.status(), equalTo(HttpResponseStatus.BAD_REQUEST));
assertThat(
new String(response.content().array(), Charset.forName("UTF-8")),
containsString("you sent a bad request and you should feel bad"));
} finally {
response.release();
}
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.http.netty4;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.util.ReferenceCounted;
import org.elasticsearch.ESNetty4IntegTestCase;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.http.HttpServerTransport;
@ -45,14 +46,18 @@ public class Netty4PipeliningIT extends ESNetty4IntegTestCase {
HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class);
TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses();
TransportAddress transportAddress = (TransportAddress) randomFrom(boundAddresses);
TransportAddress transportAddress = randomFrom(boundAddresses);
try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) {
Collection<FullHttpResponse> responses = nettyHttpClient.get(transportAddress.address(), requests);
try {
assertThat(responses, hasSize(5));
Collection<String> opaqueIds = Netty4HttpClient.returnOpaqueIds(responses);
assertOpaqueIdsInOrder(opaqueIds);
} finally {
responses.forEach(ReferenceCounted::release);
}
}
}

View File

@ -29,13 +29,13 @@ dependencies {
compile "org.elasticsearch:elasticsearch-nio:${version}"
// network stack
compile "io.netty:netty-buffer:4.1.16.Final"
compile "io.netty:netty-codec:4.1.16.Final"
compile "io.netty:netty-codec-http:4.1.16.Final"
compile "io.netty:netty-common:4.1.16.Final"
compile "io.netty:netty-handler:4.1.16.Final"
compile "io.netty:netty-resolver:4.1.16.Final"
compile "io.netty:netty-transport:4.1.16.Final"
compile "io.netty:netty-buffer:4.1.28.Final"
compile "io.netty:netty-codec:4.1.28.Final"
compile "io.netty:netty-codec-http:4.1.28.Final"
compile "io.netty:netty-common:4.1.28.Final"
compile "io.netty:netty-handler:4.1.28.Final"
compile "io.netty:netty-resolver:4.1.28.Final"
compile "io.netty:netty-transport:4.1.28.Final"
}
dependencyLicenses {
@ -113,7 +113,6 @@ thirdPartyAudit.excludes = [
'net.jpountz.xxhash.StreamingXXHash32',
'net.jpountz.xxhash.XXHashFactory',
'io.netty.internal.tcnative.CertificateRequestedCallback',
'io.netty.internal.tcnative.CertificateRequestedCallback$KeyMaterial',
'io.netty.internal.tcnative.CertificateVerifier',
'io.netty.internal.tcnative.SessionTicketKey',
'io.netty.internal.tcnative.SniHostNameMatcher',
@ -140,6 +139,6 @@ thirdPartyAudit.excludes = [
'org.conscrypt.AllocatedBuffer',
'org.conscrypt.BufferAllocator',
'org.conscrypt.Conscrypt$Engines',
'org.conscrypt.Conscrypt',
'org.conscrypt.HandshakeListener'
]

View File

@ -1 +0,0 @@
63b5fa95c74785e16f2c30ce268bc222e35c8cb5

View File

@ -0,0 +1 @@
d6c2d13492778009d33f60e05ed90bcb535d1fd1

View File

@ -1 +0,0 @@
d84a1f21768b7309c2954521cf5a1f46c2309eb1

View File

@ -0,0 +1 @@
a38361d893900947524f8a9da980555950e73d6a

View File

@ -1 +0,0 @@
d64312378b438dfdad84267c599a053327c6f02a

View File

@ -0,0 +1 @@
897100c1022c780b0a436b9349e507e8fa9800dc

View File

@ -1 +0,0 @@
177a6b30cca92f6f5f9873c9befd681377a4c328

View File

@ -0,0 +1 @@
df69ce8bb9b544a71e7bbee290253cf7c93e6bad

View File

@ -1 +0,0 @@
fec0e63e7dd7f4eeef7ea8dc47a1ff32dfc7ebc2

View File

@ -0,0 +1 @@
a035784682da0126bc25f10713dac732b5082a6d

View File

@ -1 +0,0 @@
f6eb553b53fb3a90a8ac1170697093fed82eae28

View File

@ -0,0 +1 @@
f33557dcb31fa20da075ac05e4808115e32ef9b7

View File

@ -1 +0,0 @@
3c8ee2c4d4a1cbb947a5c184c7aeb2204260958b

View File

@ -0,0 +1 @@
d2ef28f49d726737f0ffe84bf66529b3bf6e0c0d

View File

@ -26,4 +26,6 @@ grant codeBase "${codebase.netty-common}" {
// This should only currently be required as we use the netty http client for tests
// netty makes and accepts socket connections
permission java.net.SocketPermission "*", "accept,connect";
// Netty sets custom classloader for some of its internal threads
permission java.lang.RuntimePermission "*", "setContextClassLoader";
};

View File

@ -198,14 +198,23 @@ public class NioHttpServerTransportTests extends ESTestCase {
HttpUtil.setContentLength(request, contentLength);
final FullHttpResponse response = client.post(remoteAddress.address(), request);
try {
assertThat(response.status(), equalTo(expectedStatus));
if (expectedStatus.equals(HttpResponseStatus.CONTINUE)) {
final FullHttpRequest continuationRequest =
new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", Unpooled.EMPTY_BUFFER);
final FullHttpResponse continuationResponse = client.post(remoteAddress.address(), continuationRequest);
try {
assertThat(continuationResponse.status(), is(HttpResponseStatus.OK));
assertThat(new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8), is("done"));
assertThat(
new String(ByteBufUtil.getBytes(continuationResponse.content()), StandardCharsets.UTF_8), is("done")
);
} finally {
continuationResponse.release();
}
}
} finally {
response.release();
}
}
}
@ -271,10 +280,14 @@ public class NioHttpServerTransportTests extends ESTestCase {
final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, url);
final FullHttpResponse response = client.post(remoteAddress.address(), request);
try {
assertThat(response.status(), equalTo(HttpResponseStatus.BAD_REQUEST));
assertThat(
new String(response.content().array(), Charset.forName("UTF-8")),
containsString("you sent a bad request and you should feel bad"));
} finally {
response.release();
}
}
}

View File

@ -360,6 +360,7 @@ public class EvilLoggerTests extends ESTestCase {
}
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32546")
public void testNoNodeNameWarning() throws IOException, UserException {
setupLogging("no_node_name");

View File

@ -23,7 +23,6 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
@ -34,13 +33,7 @@ public abstract class AbstractComponent {
protected final Settings settings;
public AbstractComponent(Settings settings) {
this.logger = Loggers.getLogger(getClass(), settings);
this.deprecationLogger = new DeprecationLogger(logger);
this.settings = settings;
}
public AbstractComponent(Settings settings, Class<?> customClass) {
this.logger = LogManager.getLogger(customClass);
this.logger = LogManager.getLogger(getClass());
this.deprecationLogger = new DeprecationLogger(logger);
this.settings = settings;
}

View File

@ -35,10 +35,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple
super(settings);
}
protected AbstractLifecycleComponent(Settings settings, Class<?> customClass) {
super(settings, customClass);
}
@Override
public Lifecycle.State lifecycleState() {
return this.lifecycle.state();

View File

@ -38,9 +38,13 @@ public final class ESLoggerFactory {
public static Logger getLogger(String prefix, Class<?> clazz) {
/*
* Do not use LogManager#getLogger(Class) as this now uses Class#getCanonicalName under the hood; as this returns null for local and
* anonymous classes, any place we create, for example, an abstract component defined as an anonymous class (e.g., in tests) will
* result in a logger with a null name which will blow up in a lookup inside of Log4j.
* At one point we didn't use LogManager.getLogger(clazz) because
* of a bug in log4j that has since been fixed:
* https://github.com/apache/logging-log4j2/commit/ae33698a1846a5e10684ec3e52a99223f06047af
*
* For now we continue to use LogManager.getLogger(clazz.getName())
* because we expect to eventually migrate away from needing this
* method entirely.
*/
return getLogger(prefix, LogManager.getLogger(clazz.getName()));
}
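As background for the comment above: `Class#getCanonicalName` returns `null` for anonymous and local classes, while `Class#getName` always produces a usable (if synthetic) name, which is why the lookup stays on `clazz.getName()`. A small illustrative sketch (the class name is hypothetical):

```
public class LoggerNameSketch {
    public static void main(String[] args) {
        Object anonymous = new Object() { };
        // Always non-null, e.g. "LoggerNameSketch$1".
        System.out.println(anonymous.getClass().getName());
        // null for anonymous classes, which would break a name-based logger lookup.
        System.out.println(anonymous.getClass().getCanonicalName());
    }
}
```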

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
@ -29,18 +28,23 @@ import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.ReadableDateTime;
import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.UnaryOperator;
import static org.elasticsearch.common.Booleans.parseBoolean;
/**
* Script level doc values, the assumption is that any implementation will
@ -52,6 +56,7 @@ import java.util.function.UnaryOperator;
* values from multiple documents.
*/
public abstract class ScriptDocValues<T> extends AbstractList<T> {
/**
* Set the current doc ID.
*/
@ -142,31 +147,55 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
}
}
public static final class Dates extends ScriptDocValues<ReadableDateTime> {
protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Dates.class));
public static final class Dates extends ScriptDocValues<Object> {
private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC);
/** Whether scripts should expose dates as java time objects instead of joda time. */
private static final boolean USE_JAVA_TIME = parseBoolean(System.getProperty("es.scripting.use_java_time"), false);
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Dates.class));
private final SortedNumericDocValues in;
/**
* Values wrapped in {@link MutableDateTime}. Null by default and allocated on first usage so we allocate a reasonable size. We keep
* this array so we don't have to allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document.
* Method call to add a deprecation message. Normally this is
* {@link #deprecationLogger} but tests override it.
*/
private MutableDateTime[] dates;
private final Consumer<String> deprecationCallback;
/**
* Whether java time or joda time should be used. This is normally {@link #USE_JAVA_TIME} but tests override it.
*/
private final boolean useJavaTime;
/**
* Values wrapped in a date time object. The concrete type depends on the system property {@code es.scripting.use_java_time}.
* When that system property is {@code false}, the date time objects are of type {@link MutableDateTime}. When the system
* property is {@code true}, the date time objects are of type {@link java.time.ZonedDateTime}.
*/
private Object[] dates;
private int count;
/**
* Standard constructor.
*/
public Dates(SortedNumericDocValues in) {
this(in, message -> deprecationLogger.deprecatedAndMaybeLog("scripting_joda_time_deprecation", message), USE_JAVA_TIME);
}
/**
* Constructor for testing with a deprecation callback.
*/
Dates(SortedNumericDocValues in, Consumer<String> deprecationCallback, boolean useJavaTime) {
this.in = in;
this.deprecationCallback = deprecationCallback;
this.useJavaTime = useJavaTime;
}
/**
* Fetch the first field value or 0 millis after epoch if there are no
* values.
*/
public ReadableDateTime getValue() {
public Object getValue() {
if (count == 0) {
throw new IllegalStateException("A document doesn't have a value for a field! " +
"Use doc[<field>].size()==0 to check if a document is missing a field!");
@ -175,7 +204,7 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
}
@Override
public ReadableDateTime get(int index) {
public Object get(int index) {
if (index >= count) {
throw new IndexOutOfBoundsException(
"attempted to fetch the [" + index + "] date when there are only ["
@ -206,26 +235,20 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
if (count == 0) {
return;
}
if (dates == null) {
if (useJavaTime) {
if (dates == null || count > dates.length) {
// Happens for the first document and for any document with more dates than we have seen before.
// We delay allocating dates so we can allocate it with a reasonable size.
dates = new ZonedDateTime[count];
}
for (int i = 0; i < count; ++i) {
dates[i] = ZonedDateTime.ofInstant(Instant.ofEpochMilli(in.nextValue()), ZoneOffset.UTC);
}
} else {
deprecated("The joda time api for doc values is deprecated. Use -Des.scripting.use_java_time=true" +
" to use the java time api for date field doc values");
if (dates == null || count > dates.length) {
// Happens for the first document and for any document with more dates than we have seen before.
// We delay allocating dates so we can allocate it with a reasonable size.
dates = new MutableDateTime[count];
for (int i = 0; i < dates.length; i++) {
dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
}
return;
}
if (count > dates.length) {
// Happens when we move to a new document and it has more dates than any documents before it.
MutableDateTime[] backup = dates;
dates = new MutableDateTime[count];
System.arraycopy(backup, 0, dates, 0, backup.length);
for (int i = 0; i < backup.length; i++) {
dates[i].setMillis(in.nextValue());
}
for (int i = backup.length; i < dates.length; i++) {
dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
}
return;
}
for (int i = 0; i < count; i++) {
dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
@ -233,6 +256,23 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
}
}
/**
* Log a deprecation message, with the server's permissions, not the permissions of the
* script calling this method. We need to do this to prevent errors when rolling
* the log file.
*/
private void deprecated(String message) {
// Intentionally not calling SpecialPermission.check because this is supposed to be called by scripts
AccessController.doPrivileged(new PrivilegedAction<Void>() {
@Override
public Void run() {
deprecationCallback.accept(message);
return null;
}
});
}
}
public static final class Doubles extends ScriptDocValues<Double> {
private final SortedNumericDoubleValues in;
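Since `Dates#getValue` and `Dates#get` now return `Object`, whose concrete type depends on `-Des.scripting.use_java_time`, callers that need epoch millis have to accept either a joda or a java time value. A hedged sketch of that handling (the `toMillis` helper is illustrative and not part of this change):

```
import org.joda.time.ReadableDateTime;

import java.time.ZonedDateTime;

public class DateValueSketch {
    // Illustrative helper: accepts either representation returned by ScriptDocValues.Dates.
    static long toMillis(Object dateValue) {
        if (dateValue instanceof ReadableDateTime) {
            // joda time, the default when es.scripting.use_java_time is unset or false
            return ((ReadableDateTime) dateValue).getMillis();
        }
        // java time, when -Des.scripting.use_java_time=true
        return ((ZonedDateTime) dateValue).toInstant().toEpochMilli();
    }

    public static void main(String[] args) {
        System.out.println(toMillis(ZonedDateTime.now()));
    }
}
```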

View File

@ -19,6 +19,11 @@
package org.elasticsearch.script;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@ -27,12 +32,6 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript;
/**
* Manages building {@link ScriptService}.
*/

View File

@ -27,6 +27,7 @@ import org.joda.time.ReadableInstant;
import java.io.IOException;
import java.lang.reflect.Array;
import java.time.ZonedDateTime;
import java.util.Collection;
/**
@ -54,6 +55,9 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc
} else if (value instanceof ReadableInstant) {
resize(1);
values[0] = ((ReadableInstant) value).getMillis();
} else if (value instanceof ZonedDateTime) {
resize(1);
values[0] = ((ZonedDateTime) value).toInstant().toEpochMilli();
} else if (value.getClass().isArray()) {
int length = Array.getLength(value);
if (length == 0) {
@ -89,6 +93,8 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc
} else if (o instanceof ReadableInstant) {
// Dates are exposed in scripts as ReadableDateTimes but aggregations want them to be numeric
return ((ReadableInstant) o).getMillis();
} else if (o instanceof ZonedDateTime) {
return ((ZonedDateTime) o).toInstant().toEpochMilli();
} else if (o instanceof Boolean) {
// We do expose boolean fields as boolean in scripts, however aggregations still expect
// that scripts return the same internal representation as regular fields, so boolean

View File

@ -28,6 +28,7 @@ import org.joda.time.ReadableInstant;
import java.io.IOException;
import java.lang.reflect.Array;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Iterator;
@ -91,6 +92,8 @@ public class ScriptLongValues extends AbstractSortingNumericDocValues implements
} else if (o instanceof ReadableInstant) {
// Dates are exposed in scripts as ReadableDateTimes but aggregations want them to be numeric
return ((ReadableInstant) o).getMillis();
} else if (o instanceof ZonedDateTime) {
return ((ZonedDateTime) o).toInstant().toEpochMilli();
} else if (o instanceof Boolean) {
// We do expose boolean fields as boolean in scripts, however aggregations still expect
// that scripts return the same internal representation as regular fields, so boolean

View File

@ -118,13 +118,13 @@ public class ClusterApplierServiceTests extends ESTestCase {
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test1",
clusterApplierService.getClass().getName(),
clusterApplierService.getClass().getCanonicalName(),
Level.DEBUG,
"*processing [test1]: took [1s] no change in cluster state"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test2",
clusterApplierService.getClass().getName(),
clusterApplierService.getClass().getCanonicalName(),
Level.TRACE,
"*failed to execute cluster state applier in [2s]*"));
@ -192,19 +192,19 @@ public class ClusterApplierServiceTests extends ESTestCase {
mockAppender.addExpectation(
new MockLogAppender.UnseenEventExpectation(
"test1 shouldn't see because setting is too low",
clusterApplierService.getClass().getName(),
clusterApplierService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state applier task [test1] took [*] above the warn threshold of *"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test2",
clusterApplierService.getClass().getName(),
clusterApplierService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state applier task [test2] took [32s] above the warn threshold of *"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test4",
clusterApplierService.getClass().getName(),
clusterApplierService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state applier task [test3] took [34s] above the warn threshold of *"));

View File

@ -309,19 +309,19 @@ public class MasterServiceTests extends ESTestCase {
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test1",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.DEBUG,
"*processing [test1]: took [1s] no change in cluster state"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test2",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.TRACE,
"*failed to execute cluster state update in [2s]*"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test3",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.DEBUG,
"*processing [test3]: took [3s] done publishing updated cluster state (version: *, uuid: *)"));
@ -650,25 +650,25 @@ public class MasterServiceTests extends ESTestCase {
mockAppender.addExpectation(
new MockLogAppender.UnseenEventExpectation(
"test1 shouldn't see because setting is too low",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state update task [test1] took [*] above the warn threshold of *"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test2",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state update task [test2] took [32s] above the warn threshold of *"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test3",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state update task [test3] took [33s] above the warn threshold of *"));
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"test4",
masterService.getClass().getName(),
masterService.getClass().getCanonicalName(),
Level.WARN,
"*cluster state update task [test4] took [34s] above the warn threshold of *"));

View File

@ -23,29 +23,74 @@ import org.elasticsearch.index.fielddata.ScriptDocValues.Dates;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadableDateTime;
import java.io.IOException;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import static org.hamcrest.Matchers.containsInAnyOrder;
public class ScriptDocValuesDatesTests extends ESTestCase {
public void test() throws IOException {
public void testJavaTime() throws IOException {
assertDateDocValues(true);
}
public void testJodaTimeBwc() throws IOException {
assertDateDocValues(false, "The joda time api for doc values is deprecated." +
" Use -Des.scripting.use_java_time=true to use the java time api for date field doc values");
}
public void assertDateDocValues(boolean useJavaTime, String... expectedWarnings) throws IOException {
final Function<Long, Object> datetimeCtor;
if (useJavaTime) {
datetimeCtor = millis -> ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC);
} else {
datetimeCtor = millis -> new DateTime(millis, DateTimeZone.UTC);
}
long[][] values = new long[between(3, 10)][];
ReadableDateTime[][] expectedDates = new ReadableDateTime[values.length][];
Object[][] expectedDates = new Object[values.length][];
for (int d = 0; d < values.length; d++) {
values[d] = new long[randomBoolean() ? randomBoolean() ? 0 : 1 : between(2, 100)];
expectedDates[d] = new ReadableDateTime[values[d].length];
expectedDates[d] = new Object[values[d].length];
for (int i = 0; i < values[d].length; i++) {
expectedDates[d][i] = new DateTime(randomNonNegativeLong(), DateTimeZone.UTC);
values[d][i] = expectedDates[d][i].getMillis();
values[d][i] = randomNonNegativeLong();
expectedDates[d][i] = datetimeCtor.apply(values[d][i]);
}
}
Dates dates = wrap(values);
Set<String> warnings = new HashSet<>();
Dates dates = wrap(values, deprecationMessage -> {
warnings.add(deprecationMessage);
/* Create a temporary directory to prove we are running with the
* server's permissions. */
createTempDir();
}, useJavaTime);
// each call to get or getValue will be run with limited permissions, just as they are in scripts
PermissionCollection noPermissions = new Permissions();
AccessControlContext noPermissionsAcc = new AccessControlContext(
new ProtectionDomain[] {
new ProtectionDomain(null, noPermissions)
}
);
for (int round = 0; round < 10; round++) {
int d = between(0, values.length - 1);
dates.setNextDocId(d);
if (expectedDates[d].length > 0) {
assertEquals(expectedDates[d][0] , dates.getValue());
Object dateValue = AccessController.doPrivileged((PrivilegedAction<Object>) dates::getValue, noPermissionsAcc);
assertEquals(expectedDates[d][0] , dateValue);
} else {
Exception e = expectThrows(IllegalStateException.class, () -> dates.getValue());
assertEquals("A document doesn't have a value for a field! " +
@ -54,15 +99,16 @@ public class ScriptDocValuesDatesTests extends ESTestCase {
assertEquals(values[d].length, dates.size());
for (int i = 0; i < values[d].length; i++) {
assertEquals(expectedDates[d][i], dates.get(i));
}
Exception e = expectThrows(UnsupportedOperationException.class, () -> dates.add(new DateTime()));
assertEquals("doc values are unmodifiable", e.getMessage());
final int ndx = i;
Object dateValue = AccessController.doPrivileged((PrivilegedAction<Object>) () -> dates.get(ndx), noPermissionsAcc);
assertEquals(expectedDates[d][i], dateValue);
}
}
private Dates wrap(long[][] values) {
assertThat(warnings, containsInAnyOrder(expectedWarnings));
}
private Dates wrap(long[][] values, Consumer<String> deprecationHandler, boolean useJavaTime) {
return new Dates(new AbstractSortedNumericDocValues() {
long[] current;
int i;
@ -81,6 +127,6 @@ public class ScriptDocValuesDatesTests extends ESTestCase {
public long nextValue() {
return current[i++];
}
});
}, deprecationHandler, useJavaTime);
}
}
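The test above runs each doc-value access under an `AccessControlContext` that carries no permissions, mirroring how scripts are sandboxed. A minimal standalone sketch of that mechanism (the privileged body here is an arbitrary example):

```
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.Permissions;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;

public class NoPermissionsSketch {
    public static void main(String[] args) {
        AccessControlContext noPermissions = new AccessControlContext(
            new ProtectionDomain[] { new ProtectionDomain(null, new Permissions()) });
        // With a SecurityManager installed, anything inside the action that needs a
        // permission is denied, because the effective permissions are the intersection
        // of the caller's permissions with the empty set above.
        String result = AccessController.doPrivileged(
            (PrivilegedAction<String>) () -> Integer.toString(6 * 7), noPermissions);
        System.out.println(result);
    }
}
```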

View File

@ -47,15 +47,10 @@ import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadableDateTime;
import org.joda.time.base.BaseDateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
@ -64,6 +59,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
@ -115,7 +111,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
scripts.put("doc['date'].date.millis", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Dates dates = (ScriptDocValues.Dates) doc.get("date");
return dates.getValue().getMillis();
return ((ZonedDateTime) dates.getValue()).toInstant().toEpochMilli();
});
scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1"));
@ -805,8 +801,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
BaseDateTime dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
assertThat(dateField.getMillis(), equalTo(date.toInstant().toEpochMilli()));
ZonedDateTime dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue();
assertThat(dateField.toInstant().toEpochMilli(), equalTo(date.toInstant().toEpochMilli()));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@ -946,10 +942,10 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertAcked(prepareCreate("test").addMapping("type", mapping));
ensureGreen("test");
DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC);
DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd");
ZonedDateTime date = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC);
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ROOT);
index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date));
index("test", "type", "1", "text_field", "foo", "date_field", formatter.format(date));
refresh("test");
SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery())
@ -977,7 +973,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
DocumentField dateField = fields.get("date_field");
assertThat(dateField.getName(), equalTo("date_field"));
ReadableDateTime fetchedDate = dateField.getValue();
ZonedDateTime fetchedDate = dateField.getValue();
assertThat(fetchedDate, equalTo(date));
}
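
The formatter swap in this test follows the usual Joda-to-java.time recipe. A minimal stand-alone sketch of the before/after calls; the class and variable names are invented for the example.

```java
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Locale;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

// Invented example class; not part of this commit.
public class DateFormatterMigrationSketch {
    public static void main(String[] args) {
        // Old: Joda time formatter and print(...)
        DateTime jodaDate = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC);
        org.joda.time.format.DateTimeFormatter jodaFormatter =
                org.joda.time.format.DateTimeFormat.forPattern("yyyy-MM-dd");
        System.out.println(jodaFormatter.print(jodaDate));      // 1990-12-29

        // New: java.time formatter with an explicit Locale, and format(...)
        ZonedDateTime javaDate = ZonedDateTime.of(1990, 12, 29, 0, 0, 0, 0, ZoneOffset.UTC);
        java.time.format.DateTimeFormatter javaFormatter =
                java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ROOT);
        System.out.println(javaFormatter.format(javaDate));     // 1990-12-29
    }
}
```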

View File

@ -148,7 +148,7 @@ public abstract class ESRestTestCase extends ESTestCase {
}
String host = stringUrl.substring(0, portSeparator);
int port = Integer.valueOf(stringUrl.substring(portSeparator + 1));
hosts.add(new HttpHost(host, port, getProtocol()));
hosts.add(buildHttpHost(host, port));
}
clusterHosts = unmodifiableList(hosts);
logger.info("initializing REST clients against {}", clusterHosts);
@ -160,6 +160,13 @@ public abstract class ESRestTestCase extends ESTestCase {
assert clusterHosts != null;
}
/**
* Construct a HttpHost from the given host and port
*/
protected HttpHost buildHttpHost(String host, int port) {
return new HttpHost(host, port, getProtocol());
}
/**
* Clean up after the test case.
*/
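
A hedged sketch of how a subclass might use the new `buildHttpHost` extension point; the class name and the hard-coded `"https"` scheme are assumptions for the example, not part of this change.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.test.rest.ESRestTestCase;

// Invented subclass; shown only to illustrate the new override point.
public abstract class TlsEnforcingRestTestCase extends ESRestTestCase {

    @Override
    protected HttpHost buildHttpHost(String host, int port) {
        // Always address the test cluster over HTTPS instead of relying on getProtocol().
        return new HttpHost(host, port, "https");
    }
}
```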

View File

@ -16,7 +16,7 @@ Let us see how those can be configured by examples.
==== Specifying rule scope
Let us assume we are configuring a job in order to DNS data exfiltration.
Let us assume we are configuring a job in order to detect DNS data exfiltration.
Our data contain fields "subdomain" and "highest_registered_domain".
We can use a detector that looks like `high_info_content(subdomain) over highest_registered_domain`.
If we run such a job it is possible that we discover a lot of anomalies on
@ -25,8 +25,8 @@ are not interested in such anomalies. Ideally, we could instruct the detector to
skip results for domains that we consider safe. Using a rule with a scope allows
us to achieve this.
First, we need to create a list with our safe domains. Those lists are called
`filters` in {ml}. Filters can be shared across jobs.
First, we need to create a list of our safe domains. Those lists are called
_filters_ in {ml}. Filters can be shared across jobs.
We create our filter using the {ref}/ml-put-filter.html[put filter API]:
@ -40,8 +40,8 @@ PUT _xpack/ml/filters/safe_domains
----------------------------------
// CONSOLE
Now, we can create our job specifying a scope that uses the filter for the
`highest_registered_domain` field:
Now, we can create our job specifying a scope that uses the `safe_domains`
filter for the `highest_registered_domain` field:
[source,js]
----------------------------------
@ -85,7 +85,9 @@ POST _xpack/ml/filters/safe_domains/_update
// CONSOLE
// TEST[setup:ml_filter_safe_domains]
Note that we can provide scope for any of the partition/over/by fields.
Note that we can use any of the `partition_field_name`, `over_field_name`, or
`by_field_name` fields in the `scope`.
In the following example we scope multiple fields:
[source,js]
@ -210,9 +212,9 @@ Rules only affect results created after the rules were applied.
Let us imagine that we have configured a job and it has been running
for some time. After observing its results we decide that we can employ
rules in order to get rid of some uninteresting results. We can use
the update-job API to do so. However, the rule we added will only be in effect
for any results created from the moment we added the rule onwards. Past results
will remain unaffected.
the {ref}/ml-update-job.html[update job API] to do so. However, the rule we
added will only be in effect for any results created from the moment we added
the rule onwards. Past results will remain unaffected.
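
As a rough illustration only, an update of this kind could also be issued from Java through the low-level REST client. The job id, detector index, and request body below are assumptions; the update job API documentation linked above is authoritative for the exact `custom_rules` syntax.

[source,java]
----
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Invented helper; the job id, detector index, and body are assumptions.
public class AddScopedRuleSketch {

    static Response addSafeDomainsRule(RestClient client) throws IOException {
        Request update = new Request("POST", "/_xpack/ml/anomaly_detectors/dns_exfiltration_job/_update");
        update.setJsonEntity(
            "{ \"detectors\": [ { \"detector_index\": 0, \"custom_rules\": [ "
                + "{ \"scope\": { \"highest_registered_domain\": { \"filter_id\": \"safe_domains\" } } } ] } ] }");
        return client.performRequest(update);
    }
}
----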
==== Using rules VS filtering data

View File

@ -23,6 +23,8 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static java.util.Arrays.asList;
/**
* The configuration object for the groups section in the rollup config.
* Basically just a wrapper for histo/date histo/terms objects
@ -50,7 +52,7 @@ public class GroupConfig implements Writeable, ToXContentObject {
static {
PARSER.declareObject(GroupConfig.Builder::setDateHisto, (p,c) -> DateHistoGroupConfig.PARSER.apply(p,c).build(), DATE_HISTO);
PARSER.declareObject(GroupConfig.Builder::setHisto, (p,c) -> HistoGroupConfig.PARSER.apply(p,c).build(), HISTO);
PARSER.declareObject(GroupConfig.Builder::setTerms, (p,c) -> TermsGroupConfig.PARSER.apply(p,c).build(), TERMS);
PARSER.declareObject(GroupConfig.Builder::setTerms, (p,c) -> TermsGroupConfig.fromXContent(p), TERMS);
}
private GroupConfig(DateHistoGroupConfig dateHisto, @Nullable HistoGroupConfig histo, @Nullable TermsGroupConfig terms) {
@ -84,7 +86,7 @@ public class GroupConfig implements Writeable, ToXContentObject {
fields.addAll(histo.getAllFields());
}
if (terms != null) {
fields.addAll(terms.getAllFields());
fields.addAll(asList(terms.getFields()));
}
return fields;
}
@ -100,7 +102,6 @@ public class GroupConfig implements Writeable, ToXContentObject {
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@ -113,9 +114,7 @@ public class GroupConfig implements Writeable, ToXContentObject {
builder.endObject();
}
if (terms != null) {
builder.startObject(TERMS.getPreferredName());
terms.toXContent(builder, params);
builder.endObject();
builder.field(TERMS.getPreferredName(), terms);
}
builder.endObject();
return builder;
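
The switch of the terms config to `ToXContentObject` is what lets `GroupConfig` emit it with a single `field(...)` call. A hedged, stand-alone sketch of that difference; the class and field names are invented for the example.

```java
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;

// Invented example class; not part of this commit.
public class TermsSerializationSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        {
            // Old style (ToXContentFragment): the parent had to wrap the fragment itself:
            //   builder.startObject("terms"); terms.toXContent(builder, params); builder.endObject();
            // New style (ToXContentObject): the config writes its own braces, so one call suffices.
            builder.field("terms", new TermsGroupConfig("the_field"));
        }
        builder.endObject();
        System.out.println(Strings.toString(builder)); // {"terms":{"fields":["the_field"]}}
    }
}
```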

View File

@ -12,9 +12,10 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
@ -24,13 +25,13 @@ import org.elasticsearch.xpack.core.rollup.RollupField;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* The configuration object for the histograms in the rollup config
*
@ -42,20 +43,29 @@ import java.util.stream.Collectors;
* ]
* }
*/
public class TermsGroupConfig implements Writeable, ToXContentFragment {
private static final String NAME = "term_group_config";
public static final ObjectParser<TermsGroupConfig.Builder, Void> PARSER = new ObjectParser<>(NAME, TermsGroupConfig.Builder::new);
public class TermsGroupConfig implements Writeable, ToXContentObject {
private static final String NAME = "terms";
private static final String FIELDS = "fields";
private static final ParseField FIELDS = new ParseField("fields");
private static final List<String> FLOAT_TYPES = Arrays.asList("half_float", "float", "double", "scaled_float");
private static final List<String> NATURAL_TYPES = Arrays.asList("byte", "short", "integer", "long");
private final String[] fields;
private static final ConstructingObjectParser<TermsGroupConfig, Void> PARSER;
static {
PARSER.declareStringArray(TermsGroupConfig.Builder::setFields, FIELDS);
PARSER = new ConstructingObjectParser<>(NAME, args -> {
@SuppressWarnings("unchecked") List<String> fields = (List<String>) args[0];
return new TermsGroupConfig(fields != null ? fields.toArray(new String[fields.size()]) : null);
});
PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
}
private TermsGroupConfig(String[] fields) {
private final String[] fields;
public TermsGroupConfig(final String... fields) {
if (fields == null || fields.length == 0) {
throw new IllegalArgumentException("Fields must have at least one value");
}
this.fields = fields;
}
@ -63,6 +73,9 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment {
fields = in.readStringArray();
}
/**
* @return the names of the fields. Never {@code null}.
*/
public String[] getFields() {
return fields;
}
@ -72,10 +85,6 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment {
* set of date histograms. Used by the rollup indexer to iterate over historical data
*/
public List<CompositeValuesSourceBuilder<?>> toBuilders() {
if (fields.length == 0) {
return Collections.emptyList();
}
return Arrays.stream(fields).map(f -> {
TermsValuesSourceBuilder vsBuilder
= new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME));
@ -94,14 +103,6 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment {
return map;
}
public Map<String, Object> getMetadata() {
return Collections.emptyMap();
}
public Set<String> getAllFields() {
return Arrays.stream(fields).collect(Collectors.toSet());
}
public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {
@ -138,8 +139,11 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(FIELDS.getPreferredName(), fields);
return builder;
builder.startObject();
{
builder.field(FIELDS, fields);
}
return builder.endObject();
}
@Override
@ -148,18 +152,15 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment {
}
@Override
public boolean equals(Object other) {
public boolean equals(final Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
TermsGroupConfig that = (TermsGroupConfig) other;
return Arrays.equals(this.fields, that.fields);
final TermsGroupConfig that = (TermsGroupConfig) other;
return Arrays.equals(fields, that.fields);
}
@Override
@ -172,23 +173,7 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment {
return Strings.toString(this, true, true);
}
public static class Builder {
private List<String> fields;
public List<String> getFields() {
return fields;
}
public TermsGroupConfig.Builder setFields(List<String> fields) {
this.fields = fields;
return this;
}
public TermsGroupConfig build() {
if (fields == null || fields.isEmpty()) {
throw new IllegalArgumentException("Parameter [" + FIELDS + "] must have at least one value.");
}
return new TermsGroupConfig(fields.toArray(new String[0]));
}
public static TermsGroupConfig fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
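
A hedged sketch of how the new `fromXContent` factory might be driven outside the test framework; the JSON snippet and the parser construction are illustrative assumptions, not code from this commit.

```java
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;

// Invented example class; not part of this commit.
public class TermsGroupConfigParsingSketch {
    public static void main(String[] args) throws Exception {
        String json = "{ \"fields\": [ \"the_field\", \"another_field\" ] }";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            TermsGroupConfig config = TermsGroupConfig.fromXContent(parser);
            System.out.println(String.join(",", config.getFields())); // the_field,another_field
        }
    }
}
```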

View File

@ -15,6 +15,8 @@ grant {
grant codeBase "${codebase.netty-common}" {
// for reading the system-wide configuration for the backlog of established sockets
permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read";
// Netty sets custom classloader for some of its internal threads
permission java.lang.RuntimePermission "*", "setContextClassLoader";
};
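
A hedged illustration of why the grant above is needed: under a security manager, switching a thread's context classloader is guarded by `RuntimePermission("setContextClassLoader")`. The class below is invented for the example and is not part of Netty or this commit.

```java
// Invented example class; not part of Netty or this commit.
public class ContextClassLoaderSketch {
    public static void main(String[] args) {
        Thread worker = new Thread(() -> {});
        // With a SecurityManager installed and no "setContextClassLoader" grant for the
        // calling code base, this call throws java.security.AccessControlException.
        worker.setContextClassLoader(ContextClassLoaderSketch.class.getClassLoader());
        System.out.println("context classloader set");
    }
}
```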
grant codeBase "${codebase.netty-transport}" {

View File

@ -17,9 +17,13 @@ import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween;
import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween;
public class ConfigTestHelpers {
public static RollupJobConfig.Builder getRollupJob(String jobId) {
@ -49,7 +53,7 @@ public class ConfigTestHelpers {
groupBuilder.setHisto(getHisto().build());
}
if (ESTestCase.randomBoolean()) {
groupBuilder.setTerms(getTerms().build());
groupBuilder.setTerms(randomTermsGroupConfig(ESTestCase.random()));
}
return groupBuilder;
}
@ -105,12 +109,6 @@ public class ConfigTestHelpers {
return histoBuilder;
}
public static TermsGroupConfig.Builder getTerms() {
TermsGroupConfig.Builder builder = new TermsGroupConfig.Builder();
builder.setFields(getFields());
return builder;
}
public static List<String> getFields() {
return IntStream.range(0, ESTestCase.randomIntBetween(1, 10))
.mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10))
@ -126,4 +124,21 @@ public class ConfigTestHelpers {
" ?" + //day of week
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1970, 2199))); //year
}
public static TermsGroupConfig randomTermsGroupConfig(final Random random) {
return new TermsGroupConfig(randomFields(random));
}
private static String[] randomFields(final Random random) {
final int numFields = randomIntBetween(random, 1, 10);
final String[] fields = new String[numFields];
for (int i = 0; i < numFields; i++) {
fields[i] = randomField(random);
}
return fields;
}
private static String randomField(final Random random) {
return randomAsciiAlphanumOfLengthBetween(random, 5, 10);
}
}

View File

@ -11,27 +11,23 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCase<TermsGroupConfig> {
private static final List<String> FLOAT_TYPES = Arrays.asList("half_float", "float", "double", "scaled_float");
private static final List<String> NATURAL_TYPES = Arrays.asList("byte", "short", "integer", "long");
@Override
protected TermsGroupConfig doParseInstance(XContentParser parser) throws IOException {
return TermsGroupConfig.PARSER.apply(parser, null).build();
return TermsGroupConfig.fromXContent(parser);
}
@Override
@ -41,23 +37,20 @@ public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCas
@Override
protected TermsGroupConfig createTestInstance() {
return ConfigTestHelpers.getTerms().build();
return randomTermsGroupConfig(random());
}
public void testValidateNoMapping() throws IOException {
public void testValidateNoMapping() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] or [keyword/text] field with name " +
"[my_field] in any of the indices matching the index pattern."));
}
public void testValidateNomatchingField() throws IOException {
public void testValidateNomatchingField() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
@ -65,16 +58,13 @@ public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCas
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
responseMap.put("some_other_field", Collections.singletonMap("keyword", fieldCaps));
TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] or [keyword/text] field with name " +
"[my_field] in any of the indices matching the index pattern."));
}
public void testValidateFieldWrongType() throws IOException {
public void testValidateFieldWrongType() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
@ -82,17 +72,13 @@ public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCas
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
responseMap.put("my_field", Collections.singletonMap("geo_point", fieldCaps));
TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a terms group must be a [numeric] or " +
"[keyword/text] type, but found [geo_point] for field [my_field]"));
}
public void testValidateFieldMatchingNotAggregatable() throws IOException {
public void testValidateFieldMatchingNotAggregatable() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
@ -101,14 +87,12 @@ public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCas
when(fieldCaps.isAggregatable()).thenReturn(false);
responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps));
TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
}
public void testValidateMatchingField() throws IOException {
public void testValidateMatchingField() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
String type = getRandomType();
@ -118,9 +102,7 @@ public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCas
when(fieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("my_field", Collections.singletonMap(type, fieldCaps));
TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
if (e.validationErrors().size() != 0) {
fail(e.getMessage());

View File

@ -270,7 +270,7 @@ public class AutodetectCommunicator implements Closeable {
}
@Nullable
FlushAcknowledgement waitFlushToCompletion(String flushId) {
FlushAcknowledgement waitFlushToCompletion(String flushId) throws InterruptedException {
LOGGER.debug("[{}] waiting for flush", job.getId());
FlushAcknowledgement flushAcknowledgement;

View File

@ -485,7 +485,7 @@ public class AutoDetectResultProcessor {
* @return The {@link FlushAcknowledgement} if the flush has completed or the parsing finished; {@code null} if the timeout expired
*/
@Nullable
public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) {
public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) throws InterruptedException {
return failed ? null : flushListener.waitForFlush(flushId, timeout);
}

View File

@ -22,19 +22,15 @@ class FlushListener {
final AtomicBoolean cleared = new AtomicBoolean(false);
@Nullable
FlushAcknowledgement waitForFlush(String flushId, Duration timeout) {
FlushAcknowledgement waitForFlush(String flushId, Duration timeout) throws InterruptedException {
if (cleared.get()) {
return null;
}
FlushAcknowledgementHolder holder = awaitingFlushed.computeIfAbsent(flushId, (key) -> new FlushAcknowledgementHolder(flushId));
try {
if (holder.latch.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) {
return holder.flushAcknowledgement;
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
return null;
}
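
Propagating `InterruptedException` out of `waitForFlush` moves interrupt handling to the callers. A hedged, self-contained sketch of the resulting pattern; the names below are invented and are not code from this commit.

```java
import java.time.Duration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Invented example class; not part of this commit.
class WaitPropagationSketch {

    private final CountDownLatch latch = new CountDownLatch(1);

    // After the change, the waiting helper simply declares the exception ...
    boolean waitForSignal(Duration timeout) throws InterruptedException {
        return latch.await(timeout.toMillis(), TimeUnit.MILLISECONDS);
    }

    // ... and each caller decides what an interrupt means for it.
    boolean waitQuietly(Duration timeout) {
        try {
            return waitForSignal(timeout);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            return false;
        }
    }
}
```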

Some files were not shown because too many files have changed in this diff.