Merge branch 'master' into index-lifecycle
Commit a2abc86595

@@ -131,6 +131,9 @@ class BuildPlugin implements Plugin<Project> {
            runtimeJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, runtimeJavaHome))
        }

+       String inFipsJvmScript = 'print(java.security.Security.getProviders()[0].name.toLowerCase().contains("fips"));'
+       boolean inFipsJvm = Boolean.parseBoolean(runJavascript(project, runtimeJavaHome, inFipsJvmScript))
+
        // Build debugging info
        println '======================================='
        println 'Elasticsearch Build Hamster says Hello!'

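A note on the FIPS probe above: `runJavascript` evaluates the one-line script on the configured runtime JVM (which may differ from the JVM running Gradle), so the provider list inspected is the one the tests will actually see. A minimal standalone sketch of the same check, assuming only the standard `java.security` API (the `FipsCheck` class and `main` harness are illustrative, not part of the build):

```java
import java.security.Provider;
import java.security.Security;

public class FipsCheck {
    public static void main(String[] args) {
        // Same probe the inlined script performs: a FIPS-140 JVM is expected to
        // list an approved provider (e.g. BCFIPS) first in its provider order.
        Provider first = Security.getProviders()[0];
        boolean inFipsJvm = first.getName().toLowerCase().contains("fips");
        System.out.println(inFipsJvm);
    }
}
```
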
@@ -202,6 +205,7 @@ class BuildPlugin implements Plugin<Project> {
            project.rootProject.ext.buildChecksDone = true
            project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion
            project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
+           project.rootProject.ext.inFipsJvm = inFipsJvm
        }

        project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion

@@ -213,6 +217,7 @@ class BuildPlugin implements Plugin<Project> {
        project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion
        project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion
        project.ext.javaVersions = project.rootProject.ext.javaVersions
+       project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm
    }

    private static String findCompilerJavaHome() {

@@ -770,7 +775,11 @@ class BuildPlugin implements Plugin<Project> {
                systemProperty property.getKey(), property.getValue()
            }
        }
-
+       // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
+       if (project.inFipsJvm) {
+           systemProperty 'javax.net.ssl.trustStorePassword', 'password'
+           systemProperty 'javax.net.ssl.keyStorePassword', 'password'
+       }
        boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))
        enableSystemAssertions assertionsEnabled
        enableAssertions assertionsEnabled

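Context for the two passwords: when no explicit `SSLContext` is built, JSSE initializes its default key and trust material from the `javax.net.ssl.*` system properties, and in a FIPS JVM those stores are password-protected. A hedged sketch of what the forked test JVM ends up doing (the property name is the standard JSSE one; setting the value inline here stands in for the Gradle-side injection):

```java
import javax.net.ssl.SSLContext;

public class DefaultTlsContext {
    public static void main(String[] args) throws Exception {
        // Stand-in for the Gradle test task injecting the property into the test JVM:
        System.setProperty("javax.net.ssl.trustStorePassword", "password");
        // JSSE consults the javax.net.ssl.* properties when building the default
        // context; without the password a FIPS keystore fails to open.
        SSLContext context = SSLContext.getDefault();
        System.out.println("default TLS context protocol: " + context.getProtocol());
    }
}
```
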
@@ -80,7 +80,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
        RankEvalResponse response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync);
        // the expected Prec@ for the first query is 5/7 and the expected Prec@ for the second is 1/7, divided by 2 to get the average
        double expectedPrecision = (1.0 / 7.0 + 5.0 / 7.0) / 2.0;
-       assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE);
+       assertEquals(expectedPrecision, response.getMetricScore(), Double.MIN_VALUE);
        Map<String, EvalQueryQuality> partialResults = response.getPartialResults();
        assertEquals(2, partialResults.size());
        EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query");

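The comment's arithmetic, spelled out (nothing here beyond the numbers already in the test):

```java
public class ExpectedPrecision {
    public static void main(String[] args) {
        double first = 5.0 / 7.0;  // expected Prec@ of the first query
        double second = 1.0 / 7.0; // expected Prec@ of the second query
        // (5/7 + 1/7) / 2 = (6/7) / 2 = 3/7
        System.out.println((first + second) / 2.0); // ~0.4286, i.e. 3.0/7.0
    }
}
```
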
@@ -1136,14 +1136,14 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
        // end::rank-eval-execute

        // tag::rank-eval-response
-       double evaluationResult = response.getEvaluationResult(); // <1>
+       double evaluationResult = response.getMetricScore(); // <1>
        assertEquals(1.0 / 3.0, evaluationResult, 0.0);
        Map<String, EvalQueryQuality> partialResults =
            response.getPartialResults();
        EvalQueryQuality evalQuality =
            partialResults.get("kimchy_query"); // <2>
        assertEquals("kimchy_query", evalQuality.getId());
-       double qualityLevel = evalQuality.getQualityLevel(); // <3>
+       double qualityLevel = evalQuality.metricScore(); // <3>
        assertEquals(1.0 / 3.0, qualityLevel, 0.0);
        List<RatedSearchHit> hitsAndRatings = evalQuality.getHitsAndRatings();
        RatedSearchHit ratedSearchHit = hitsAndRatings.get(2);

@@ -125,27 +125,18 @@ Closure commonPackageConfig(String type, boolean oss) {
            fileMode 0644
        }
        into('lib') {
            with copySpec {
                with libFiles(oss)
                // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
                eachFile { FileCopyDetails fcp ->
                    String[] segments = fcp.relativePath.segments
                    for (int i = segments.length - 2; i > 0 && segments[i] != 'lib'; --i) {
                        directory('/' + segments[0..i].join('/'), 0755)
                    }
                    fcp.mode = 0644
                }
            }
        }
        into('modules') {
            with copySpec {
                with modulesFiles(oss)
                // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
            }
            // we need to specify every intermediate directory in these paths so the package managers know they are explicitly
            // intended to manage them; otherwise they may be left behind on uninstallation. duplicate calls of the same
            // directory are fine
            eachFile { FileCopyDetails fcp ->
                String[] segments = fcp.relativePath.segments
-               for (int i = segments.length - 2; i > 0 && segments[i] != 'modules'; --i) {
+               for (int i = segments.length - 2; i > 2; --i) {
                    directory('/' + segments[0..i].join('/'), 0755)
                }
                if (segments[-2] == 'bin') {
                    fcp.mode = 0755
                } else {

@@ -154,7 +145,6 @@ Closure commonPackageConfig(String type, boolean oss) {
                }
            }
        }
    }

    // license files
    if (type == 'deb') {

@@ -333,12 +323,6 @@ Closure commonRpmConfig(boolean oss) {

    // without this the rpm will have parent dirs of any files we copy in, eg /etc/elasticsearch
    addParentDirs false

-   // Declare the folders so that the RPM package manager removes
-   // them when upgrading or removing the package
-   directory('/usr/share/elasticsearch/bin', 0755)
-   directory('/usr/share/elasticsearch/lib', 0755)
-   directory('/usr/share/elasticsearch/modules', 0755)
}
}

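The comments above carry the actual reasoning: rpm/deb only treat a directory as "owned" by the package if it is declared explicitly, so instead of the hand-written `directory(...)` list removed from `commonRpmConfig`, the copy spec now synthesizes a declaration for every intermediate segment of each packaged file. A standalone Java sketch of that parent walk (the sample path is made up; the `i > 2` floor, which leaves `/usr/share/elasticsearch` and above undeclared, mirrors the Gradle loop):

```java
import java.util.Arrays;

public class ParentDirWalk {
    public static void main(String[] args) {
        // Hypothetical packaged file, already split the way RelativePath.segments is:
        String[] segments = {"usr", "share", "elasticsearch", "modules", "x-pack", "plugin.jar"};
        // Start at the file's parent directory and walk upward, stopping before
        // index 2 so the distribution root directories are not re-declared.
        for (int i = segments.length - 2; i > 2; --i) {
            String dir = "/" + String.join("/", Arrays.copyOfRange(segments, 0, i + 1));
            System.out.println("directory(" + dir + ", 0755)");
        }
        // prints:
        // directory(/usr/share/elasticsearch/modules/x-pack, 0755)
        // directory(/usr/share/elasticsearch/modules, 0755)
    }
}
```
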
@@ -379,9 +379,9 @@ buildRestTests.setups['exams'] = '''
    refresh: true
    body: |
      {"index":{}}
-     {"grade": 100}
+     {"grade": 100, "weight": 2}
      {"index":{}}
-     {"grade": 50}'''
+     {"grade": 50, "weight": 3}'''

buildRestTests.setups['stored_example_script'] = '''
  # Simple script to load a field. Not really a good example, but a simple one.

@@ -17,15 +17,15 @@ The `phonetic` token filter takes the following settings:
`encoder`::

    Which phonetic encoder to use. Accepts `metaphone` (default),
-   `doublemetaphone`, `soundex`, `refinedsoundex`, `caverphone1`,
+   `double_metaphone`, `soundex`, `refined_soundex`, `caverphone1`,
    `caverphone2`, `cologne`, `nysiis`, `koelnerphonetik`, `haasephonetik`,
-   `beidermorse`, `daitch_mokotoff`.
+   `beider_morse`, `daitch_mokotoff`.

`replace`::

    Whether or not the original token should be replaced by the phonetic
    token. Accepts `true` (default) and `false`. Not supported by
-   `beidermorse` encoding.
+   `beider_morse` encoding.

[source,js]
--------------------------------------------------

@@ -13,6 +13,8 @@ bucket aggregations (some bucket aggregations enable you to sort the returned bu

include::metrics/avg-aggregation.asciidoc[]

+include::metrics/weighted-avg-aggregation.asciidoc[]
+
include::metrics/cardinality-aggregation.asciidoc[]

include::metrics/extendedstats-aggregation.asciidoc[]

@@ -0,0 +1,202 @@
[[search-aggregations-metrics-weight-avg-aggregation]]
=== Weighted Avg Aggregation

A `single-value` metrics aggregation that computes the weighted average of numeric values that are extracted from the aggregated documents.
These values can be extracted from specific numeric fields in the documents.

When calculating a regular average, each datapoint has an equal "weight": it contributes equally to the final value. Weighted averages,
on the other hand, weight each datapoint differently. The amount that each datapoint contributes to the final value is extracted from the
document, or provided by a script.

As a formula, a weighted average is computed as `∑(value * weight) / ∑(weight)`.

A regular average can be thought of as a weighted average where every value has an implicit weight of `1`.

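For instance, with the two documents indexed by the `exams` setup shown earlier (grades `100` and `50`, weights `2` and `3`), the formula works out to `((100 * 2) + (50 * 3)) / (2 + 3) == 350 / 5 == 70`, matching the `70.0` in the first example response below.
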
.`weighted_avg` Parameters
[options="header"]
|===
|Parameter Name |Description |Required |Default Value
|`value` | The configuration for the field or script that provides the values |Required |
|`weight` | The configuration for the field or script that provides the weights |Required |
|`format` | The numeric response formatter |Optional |
|`value_type` | A hint about the values for pure scripts or unmapped fields |Optional |
|===

The `value` and `weight` objects have per-field specific configuration:

.`value` Parameters
[options="header"]
|===
|Parameter Name |Description |Required |Default Value
|`field` | The field that values should be extracted from |Required |
|`missing` | A value to use if the field is missing entirely |Optional |
|`script` | A script which provides the values for the document. This is mutually exclusive with `field` |Optional |
|===

.`weight` Parameters
[options="header"]
|===
|Parameter Name |Description |Required |Default Value
|`field` | The field that weights should be extracted from |Required |
|`missing` | A weight to use if the field is missing entirely |Optional |
|`script` | A script which provides the weights for the document. This is mutually exclusive with `field` |Optional |
|===

==== Examples

If our documents have a `"grade"` field that holds a 0-100 numeric score, and a `"weight"` field which holds an arbitrary numeric weight,
we can calculate the weighted average using:

[source,js]
--------------------------------------------------
POST /exams/_search
{
    "size": 0,
    "aggs" : {
        "weighted_grade": {
            "weighted_avg": {
                "value": {
                    "field": "grade"
                },
                "weight": {
                    "field": "weight"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
// TEST[setup:exams]

Which yields a response like:

[source,js]
--------------------------------------------------
{
    ...
    "aggregations": {
        "weighted_grade": {
            "value": 70.0
        }
    }
}
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]

While multiple values-per-field are allowed, only one weight is allowed. If the aggregation encounters
a document that has more than one weight (e.g. the weight field is a multi-valued field) it will throw an exception.
In that situation, you will need to specify a `script` for the weight field, and use the script
to combine the multiple values into a single value to be used.

This single weight will be applied independently to each value extracted from the `value` field.

This example shows how a single document with multiple values will be averaged with a single weight:

[source,js]
--------------------------------------------------
POST /exams/_doc?refresh
{
    "grade": [1, 2, 3],
    "weight": 2
}

POST /exams/_search
{
    "size": 0,
    "aggs" : {
        "weighted_grade": {
            "weighted_avg": {
                "value": {
                    "field": "grade"
                },
                "weight": {
                    "field": "weight"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
// TEST

The three values (`1`, `2`, and `3`) will be included as independent values, all with the weight of `2`:

[source,js]
--------------------------------------------------
{
    ...
    "aggregations": {
        "weighted_grade": {
            "value": 2.0
        }
    }
}
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]

The aggregation returns `2.0` as the result, which matches what we would expect when calculating by hand:
`((1*2) + (2*2) + (3*2)) / (2+2+2) == 2`.

==== Script

Both the value and the weight can be derived from a script, instead of a field. As a simple example, the following
will add one to the grade and weight in the document using a script:

[source,js]
--------------------------------------------------
POST /exams/_search
{
    "size": 0,
    "aggs" : {
        "weighted_grade": {
            "weighted_avg": {
                "value": {
                    "script": "doc.grade.value + 1"
                },
                "weight": {
                    "script": "doc.weight.value + 1"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
// TEST[setup:exams]

==== Missing values

The `missing` parameter defines how documents that are missing a value should be treated.
The default behavior is different for `value` and `weight`:

By default, if the `value` field is missing the document is ignored and the aggregation moves on to the next document.
If the `weight` field is missing, it is assumed to have a weight of `1` (like a normal average).

Both of these defaults can be overridden with the `missing` parameter:

[source,js]
--------------------------------------------------
POST /exams/_search
{
    "size": 0,
    "aggs" : {
        "weighted_grade": {
            "weighted_avg": {
                "value": {
                    "field": "grade",
                    "missing": 2
                },
                "weight": {
                    "field": "weight",
                    "missing": 3
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
// TEST[setup:exams]

@@ -270,10 +270,10 @@ that shows potential errors of individual queries. The response has the followin
--------------------------------
{
    "rank_eval": {
-       "quality_level": 0.4, <1>
+       "metric_score": 0.4, <1>
        "details": {
            "my_query_id1": { <2>
-               "quality_level": 0.6, <3>
+               "metric_score": 0.6, <3>
                "unrated_docs": [ <4>
                    {
                        "_index": "my_index",

@@ -308,7 +308,7 @@ that shows potential errors of individual queries. The response has the followin

<1> the overall evaluation quality calculated by the defined metric
<2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id
-<3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score
+<3> the `metric_score` in the `details` section shows the contribution of this query to the global quality metric score
<4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for this
query that didn't have a ratings value. This can be used to ask the user to supply ratings for these documents
<5> the `hits` section shows a grouping of the search results with their supplied rating

@@ -26,7 +26,7 @@ import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder;
+import org.elasticsearch.search.aggregations.support.ArrayValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;

@@ -38,7 +38,7 @@ import java.io.IOException;
import java.util.Map;

public class MatrixStatsAggregationBuilder
-    extends MultiValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MatrixStatsAggregationBuilder> {
+    extends ArrayValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MatrixStatsAggregationBuilder> {
    public static final String NAME = "matrix_stats";

    private MultiValueMode multiValueMode = MultiValueMode.AVG;

@@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.support.MultiValuesSource.NumericMultiValuesSource;
+import org.elasticsearch.search.aggregations.support.ArrayValuesSource.NumericArrayValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.internal.SearchContext;

@@ -43,7 +43,7 @@ import java.util.Map;
 **/
final class MatrixStatsAggregator extends MetricsAggregator {
    /** Multiple ValuesSource with field names */
-   private final NumericMultiValuesSource valuesSources;
+   private final NumericArrayValuesSource valuesSources;

    /** array of descriptive stats, per shard, needed to compute the correlation */
    ObjectArray<RunningStats> stats;

@@ -53,7 +53,7 @@ final class MatrixStatsAggregator extends MetricsAggregator {
                          Map<String,Object> metaData) throws IOException {
        super(name, context, parent, pipelineAggregators, metaData);
        if (valuesSources != null && !valuesSources.isEmpty()) {
-           this.valuesSources = new NumericMultiValuesSource(valuesSources, multiValueMode);
+           this.valuesSources = new NumericArrayValuesSource(valuesSources, multiValueMode);
            stats = context.bigArrays().newObjectArray(1);
        } else {
            this.valuesSources = null;

@@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregatorFactory;
+import org.elasticsearch.search.aggregations.support.ArrayValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;

@@ -33,7 +33,7 @@ import java.util.List;
import java.util.Map;

final class MatrixStatsAggregatorFactory
-    extends MultiValuesSourceAggregatorFactory<ValuesSource.Numeric, MatrixStatsAggregatorFactory> {
+    extends ArrayValuesSourceAggregatorFactory<ValuesSource.Numeric, MatrixStatsAggregatorFactory> {

    private final MultiValueMode multiValueMode;

@@ -21,14 +21,14 @@ package org.elasticsearch.search.aggregations.matrix.stats;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.MultiValueMode;
-import org.elasticsearch.search.aggregations.support.MultiValuesSourceParser.NumericValuesSourceParser;
+import org.elasticsearch.search.aggregations.support.ArrayValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Map;

-import static org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder.MULTIVALUE_MODE_FIELD;
+import static org.elasticsearch.search.aggregations.support.ArrayValuesSourceAggregationBuilder.MULTIVALUE_MODE_FIELD;

public class MatrixStatsParser extends NumericValuesSourceParser {

@@ -28,13 +28,13 @@ import java.util.Map;
/**
 * Class to encapsulate a set of ValuesSource objects labeled by field name
 */
-public abstract class MultiValuesSource <VS extends ValuesSource> {
+public abstract class ArrayValuesSource<VS extends ValuesSource> {
    protected MultiValueMode multiValueMode;
    protected String[] names;
    protected VS[] values;

-   public static class NumericMultiValuesSource extends MultiValuesSource<ValuesSource.Numeric> {
-       public NumericMultiValuesSource(Map<String, ValuesSource.Numeric> valuesSources, MultiValueMode multiValueMode) {
+   public static class NumericArrayValuesSource extends ArrayValuesSource<ValuesSource.Numeric> {
+       public NumericArrayValuesSource(Map<String, ValuesSource.Numeric> valuesSources, MultiValueMode multiValueMode) {
            super(valuesSources, multiValueMode);
            if (valuesSources != null) {
                this.values = valuesSources.values().toArray(new ValuesSource.Numeric[0]);

@@ -51,8 +51,8 @@ public abstract class MultiValuesSource <VS extends ValuesSource> {
        }
    }

-   public static class BytesMultiValuesSource extends MultiValuesSource<ValuesSource.Bytes> {
-       public BytesMultiValuesSource(Map<String, ValuesSource.Bytes> valuesSources, MultiValueMode multiValueMode) {
+   public static class BytesArrayValuesSource extends ArrayValuesSource<ValuesSource.Bytes> {
+       public BytesArrayValuesSource(Map<String, ValuesSource.Bytes> valuesSources, MultiValueMode multiValueMode) {
            super(valuesSources, multiValueMode);
            this.values = valuesSources.values().toArray(new ValuesSource.Bytes[0]);
        }

@@ -62,14 +62,14 @@ public abstract class MultiValuesSource <VS extends ValuesSource> {
        }
    }

-   public static class GeoPointValuesSource extends MultiValuesSource<ValuesSource.GeoPoint> {
+   public static class GeoPointValuesSource extends ArrayValuesSource<ValuesSource.GeoPoint> {
        public GeoPointValuesSource(Map<String, ValuesSource.GeoPoint> valuesSources, MultiValueMode multiValueMode) {
            super(valuesSources, multiValueMode);
            this.values = valuesSources.values().toArray(new ValuesSource.GeoPoint[0]);
        }
    }

-   private MultiValuesSource(Map<String, ?> valuesSources, MultiValueMode multiValueMode) {
+   private ArrayValuesSource(Map<String, ?> valuesSources, MultiValueMode multiValueMode) {
        if (valuesSources != null) {
            this.names = valuesSources.keySet().toArray(new String[0]);
        }

@@ -44,13 +44,13 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;

-public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSource, AB extends MultiValuesSourceAggregationBuilder<VS, AB>>
+public abstract class ArrayValuesSourceAggregationBuilder<VS extends ValuesSource, AB extends ArrayValuesSourceAggregationBuilder<VS, AB>>
        extends AbstractAggregationBuilder<AB> {

    public static final ParseField MULTIVALUE_MODE_FIELD = new ParseField("mode");

-   public abstract static class LeafOnly<VS extends ValuesSource, AB extends MultiValuesSourceAggregationBuilder<VS, AB>>
-           extends MultiValuesSourceAggregationBuilder<VS, AB> {
+   public abstract static class LeafOnly<VS extends ValuesSource, AB extends ArrayValuesSourceAggregationBuilder<VS, AB>>
+           extends ArrayValuesSourceAggregationBuilder<VS, AB> {

        protected LeafOnly(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
            super(name, valuesSourceType, targetValueType);

@@ -94,7 +94,7 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc
    private Object missing = null;
    private Map<String, Object> missingMap = Collections.emptyMap();

-   protected MultiValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
+   protected ArrayValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
        super(name);
        if (valuesSourceType == null) {
            throw new IllegalArgumentException("[valuesSourceType] must not be null: [" + name + "]");

@@ -103,7 +103,7 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc
        this.targetValueType = targetValueType;
    }

-   protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilder<VS, AB> clone,
+   protected ArrayValuesSourceAggregationBuilder(ArrayValuesSourceAggregationBuilder<VS, AB> clone,
                                                  Builder factoriesBuilder, Map<String, Object> metaData) {
        super(clone, factoriesBuilder, metaData);
        this.valuesSourceType = clone.valuesSourceType;

@@ -115,7 +115,7 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc
        this.missing = clone.missing;
    }

-   protected MultiValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType, ValueType targetValueType)
+   protected ArrayValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType, ValueType targetValueType)
            throws IOException {
        super(in);
        assert false == serializeTargetValueType() : "Wrong read constructor called for subclass that provides its targetValueType";

@@ -124,7 +124,7 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc
        read(in);
    }

-   protected MultiValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType) throws IOException {
+   protected ArrayValuesSourceAggregationBuilder(StreamInput in, ValuesSourceType valuesSourceType) throws IOException {
        super(in);
        assert serializeTargetValueType() : "Wrong read constructor called for subclass that serializes its targetValueType";
        this.valuesSourceType = valuesSourceType;

@@ -239,10 +239,10 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc
    }

    @Override
-   protected final MultiValuesSourceAggregatorFactory<VS, ?> doBuild(SearchContext context, AggregatorFactory<?> parent,
+   protected final ArrayValuesSourceAggregatorFactory<VS, ?> doBuild(SearchContext context, AggregatorFactory<?> parent,
            AggregatorFactories.Builder subFactoriesBuilder) throws IOException {
        Map<String, ValuesSourceConfig<VS>> configs = resolveConfig(context);
-       MultiValuesSourceAggregatorFactory<VS, ?> factory = innerBuild(context, configs, parent, subFactoriesBuilder);
+       ArrayValuesSourceAggregatorFactory<VS, ?> factory = innerBuild(context, configs, parent, subFactoriesBuilder);
        return factory;
    }

@@ -255,8 +255,9 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc
        return configs;
    }

-   protected abstract MultiValuesSourceAggregatorFactory<VS, ?> innerBuild(SearchContext context,
-           Map<String, ValuesSourceConfig<VS>> configs, AggregatorFactory<?> parent,
+   protected abstract ArrayValuesSourceAggregatorFactory<VS, ?> innerBuild(SearchContext context,
+                                                                           Map<String, ValuesSourceConfig<VS>> configs,
+                                                                           AggregatorFactory<?> parent,
            AggregatorFactories.Builder subFactoriesBuilder) throws IOException;

    public ValuesSourceConfig<VS> config(SearchContext context, String field, Script script) {

@@ -362,7 +363,7 @@ public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSourc

    @Override
    protected final boolean doEquals(Object obj) {
-       MultiValuesSourceAggregationBuilder<?, ?> other = (MultiValuesSourceAggregationBuilder<?, ?>) obj;
+       ArrayValuesSourceAggregationBuilder<?, ?> other = (ArrayValuesSourceAggregationBuilder<?, ?>) obj;
        if (!Objects.equals(fields, other.fields))
            return false;
        if (!Objects.equals(format, other.format))

@@ -30,13 +30,14 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

-public abstract class MultiValuesSourceAggregatorFactory<VS extends ValuesSource, AF extends MultiValuesSourceAggregatorFactory<VS, AF>>
+public abstract class ArrayValuesSourceAggregatorFactory<VS extends ValuesSource, AF extends ArrayValuesSourceAggregatorFactory<VS, AF>>
        extends AggregatorFactory<AF> {

    protected Map<String, ValuesSourceConfig<VS>> configs;

-   public MultiValuesSourceAggregatorFactory(String name, Map<String, ValuesSourceConfig<VS>> configs,
-           SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
+   public ArrayValuesSourceAggregatorFactory(String name, Map<String, ValuesSourceConfig<VS>> configs,
+           SearchContext context, AggregatorFactory<?> parent,
+           AggregatorFactories.Builder subFactoriesBuilder,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, subFactoriesBuilder, metaData);
        this.configs = configs;

@@ -63,6 +64,7 @@ public abstract class MultiValuesSourceAggregatorFactory<VS extends ValuesSource
            Map<String, Object> metaData) throws IOException;

    protected abstract Aggregator doCreateInternal(Map<String, VS> valuesSources, Aggregator parent, boolean collectsFromSingleBucket,
-           List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException;
+           List<PipelineAggregator> pipelineAggregators,
+           Map<String, Object> metaData) throws IOException;

}

@@ -33,30 +33,30 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

-public abstract class MultiValuesSourceParser<VS extends ValuesSource> implements Aggregator.Parser {
+public abstract class ArrayValuesSourceParser<VS extends ValuesSource> implements Aggregator.Parser {

-   public abstract static class AnyValuesSourceParser extends MultiValuesSourceParser<ValuesSource> {
+   public abstract static class AnyValuesSourceParser extends ArrayValuesSourceParser<ValuesSource> {

        protected AnyValuesSourceParser(boolean formattable) {
            super(formattable, ValuesSourceType.ANY, null);
        }
    }

-   public abstract static class NumericValuesSourceParser extends MultiValuesSourceParser<ValuesSource.Numeric> {
+   public abstract static class NumericValuesSourceParser extends ArrayValuesSourceParser<ValuesSource.Numeric> {

        protected NumericValuesSourceParser(boolean formattable) {
            super(formattable, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
        }
    }

-   public abstract static class BytesValuesSourceParser extends MultiValuesSourceParser<ValuesSource.Bytes> {
+   public abstract static class BytesValuesSourceParser extends ArrayValuesSourceParser<ValuesSource.Bytes> {

        protected BytesValuesSourceParser(boolean formattable) {
            super(formattable, ValuesSourceType.BYTES, ValueType.STRING);
        }
    }

-   public abstract static class GeoPointValuesSourceParser extends MultiValuesSourceParser<ValuesSource.GeoPoint> {
+   public abstract static class GeoPointValuesSourceParser extends ArrayValuesSourceParser<ValuesSource.GeoPoint> {

        protected GeoPointValuesSourceParser(boolean formattable) {
            super(formattable, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);

@@ -67,14 +67,14 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
    private ValuesSourceType valuesSourceType = null;
    private ValueType targetValueType = null;

-   private MultiValuesSourceParser(boolean formattable, ValuesSourceType valuesSourceType, ValueType targetValueType) {
+   private ArrayValuesSourceParser(boolean formattable, ValuesSourceType valuesSourceType, ValueType targetValueType) {
        this.valuesSourceType = valuesSourceType;
        this.targetValueType = targetValueType;
        this.formattable = formattable;
    }

    @Override
-   public final MultiValuesSourceAggregationBuilder<VS, ?> parse(String aggregationName, XContentParser parser)
+   public final ArrayValuesSourceAggregationBuilder<VS, ?> parse(String aggregationName, XContentParser parser)
            throws IOException {

        List<String> fields = null;

@@ -140,7 +140,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
        }
    }

-   MultiValuesSourceAggregationBuilder<VS, ?> factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType,
+   ArrayValuesSourceAggregationBuilder<VS, ?> factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType,
        otherOptions);
    if (fields != null) {
        factory.fields(fields);

@@ -182,7 +182,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
    /**
     * Creates a {@link ValuesSourceAggregationBuilder} from the information
     * gathered by the subclass. Options parsed in
-    * {@link MultiValuesSourceParser} itself will be added to the factory
+    * {@link ArrayValuesSourceParser} itself will be added to the factory
     * after it has been returned by this method.
     *
     * @param aggregationName

@@ -197,11 +197,13 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
     * method
     * @return the created factory
     */
-   protected abstract MultiValuesSourceAggregationBuilder<VS, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-       ValueType targetValueType, Map<ParseField, Object> otherOptions);
+   protected abstract ArrayValuesSourceAggregationBuilder<VS, ?> createFactory(String aggregationName,
+                                                                               ValuesSourceType valuesSourceType,
+                                                                               ValueType targetValueType,
+                                                                               Map<ParseField, Object> otherOptions);

    /**
-    * Allows subclasses of {@link MultiValuesSourceParser} to parse extra
+    * Allows subclasses of {@link ArrayValuesSourceParser} to parse extra
     * parameters and store them in a {@link Map} which will later be passed to
     * {@link #createFactory(String, ValuesSourceType, ValueType, Map)}.
     *

@@ -24,10 +24,12 @@ import org.elasticsearch.painless.spi.WhitelistClass;
import org.elasticsearch.painless.spi.WhitelistConstructor;
import org.elasticsearch.painless.spi.WhitelistField;
import org.elasticsearch.painless.spi.WhitelistMethod;
-import org.objectweb.asm.Type;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collections;

@@ -38,8 +40,13 @@ import java.util.Objects;
import java.util.Stack;
import java.util.regex.Pattern;

-import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_TYPE_NAME;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.CONSTRUCTOR_NAME;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME;
import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey;
import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType;
+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCanonicalTypeNames;

public class PainlessLookupBuilder {

@@ -123,17 +130,17 @@ public class PainlessLookupBuilder {
    private final List<Whitelist> whitelists;

    private final Map<String, Class<?>> canonicalClassNamesToClasses;
-   private final Map<Class<?>, PainlessClassBuilder> classesToPainlessClasses;
+   private final Map<Class<?>, PainlessClassBuilder> classesToPainlessClassBuilders;

    public PainlessLookupBuilder(List<Whitelist> whitelists) {
        this.whitelists = whitelists;

        canonicalClassNamesToClasses = new HashMap<>();
-       classesToPainlessClasses = new HashMap<>();
+       classesToPainlessClassBuilders = new HashMap<>();

-       canonicalClassNamesToClasses.put(DEF_TYPE_NAME, def.class);
-       classesToPainlessClasses.put(def.class,
-           new PainlessClassBuilder(DEF_TYPE_NAME, Object.class, Type.getType(Object.class)));
+       canonicalClassNamesToClasses.put(DEF_CLASS_NAME, def.class);
+       classesToPainlessClassBuilders.put(def.class,
+           new PainlessClassBuilder(DEF_CLASS_NAME, Object.class, org.objectweb.asm.Type.getType(Object.class)));
    }

    private Class<?> canonicalTypeNameToType(String canonicalTypeName) {

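The builder keeps two maps in step: canonical names resolve to classes, and each class owns one mutable builder for the duration of the whitelist pass. A minimal standalone sketch of that invariant (not the actual Painless code — `ClassInfo` is a stand-in for `PainlessClassBuilder`, and everything else here is illustrative):

```java
import java.util.HashMap;
import java.util.Map;

public class TwoMapRegistry {
    static final class ClassInfo {} // stand-in for PainlessClassBuilder

    final Map<String, Class<?>> namesToClasses = new HashMap<>();
    final Map<Class<?>, ClassInfo> classesToInfo = new HashMap<>();

    void register(Class<?> clazz) {
        String name = clazz.getCanonicalName();
        Class<?> existing = namesToClasses.putIfAbsent(name, clazz);
        if (existing != null && existing.equals(clazz) == false) {
            // same canonical name loaded by two class loaders, which the real builder rejects
            throw new IllegalArgumentException("name [" + name + "] already bound to another class");
        }
        classesToInfo.computeIfAbsent(clazz, c -> new ClassInfo());
    }

    public static void main(String[] args) {
        TwoMapRegistry registry = new TwoMapRegistry();
        registry.register(String.class);
        registry.register(String.class); // idempotent, like re-whitelisting the same class
        System.out.println(registry.namesToClasses.size() + " name(s), " + registry.classesToInfo.size() + " builder(s)");
    }
}
```
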
@@ -141,7 +148,7 @@ public class PainlessLookupBuilder {
    }

    private void validateType(Class<?> type) {
-       PainlessLookupUtility.validateType(type, classesToPainlessClasses.keySet());
+       PainlessLookupUtility.validateType(type, classesToPainlessClassBuilders.keySet());
    }

    public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) {

@@ -174,10 +181,10 @@ public class PainlessLookupBuilder {
        Objects.requireNonNull(clazz);

        if (clazz == def.class) {
-           throw new IllegalArgumentException("cannot add reserved class [" + DEF_TYPE_NAME + "]");
+           throw new IllegalArgumentException("cannot add reserved class [" + DEF_CLASS_NAME + "]");
        }

-       String canonicalClassName = clazz.getCanonicalName();
+       String canonicalClassName = typeToCanonicalTypeName(clazz);

        if (clazz.isArray()) {
            throw new IllegalArgumentException("cannot add array type [" + canonicalClassName + "] as a class");

@@ -187,13 +194,14 @@ public class PainlessLookupBuilder {
            throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]");
        }

-       PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClasses.get(clazz);
+       PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClassBuilders.get(clazz);

        if (existingPainlessClassBuilder == null) {
-           PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(canonicalClassName, clazz, Type.getType(clazz));
+           PainlessClassBuilder painlessClassBuilder =
+               new PainlessClassBuilder(canonicalClassName, clazz, org.objectweb.asm.Type.getType(clazz));

            canonicalClassNamesToClasses.put(canonicalClassName, clazz);
-           classesToPainlessClasses.put(clazz, painlessClassBuilder);
+           classesToPainlessClassBuilders.put(clazz, painlessClassBuilder);
        } else if (existingPainlessClassBuilder.clazz.equals(clazz) == false) {
            throw new IllegalArgumentException("class [" + canonicalClassName + "] " +
                "cannot represent multiple java classes with the same name from different class loaders");

@@ -207,308 +215,459 @@ public class PainlessLookupBuilder {
                throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package");
            }
        } else {
-           Class<?> importedPainlessType = canonicalClassNamesToClasses.get(importedCanonicalClassName);
+           Class<?> importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName);

-           if (importedPainlessType == null) {
+           if (importedPainlessClass == null) {
                if (importClassName) {
                    if (existingPainlessClassBuilder != null) {
-                       throw new IllegalArgumentException(
-                           "inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]");
+                       throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]");
                    }

                    canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz);
                }
-           } else if (importedPainlessType.equals(clazz) == false) {
-               throw new IllegalArgumentException("painless type [" + importedCanonicalClassName + "] illegally represents multiple " +
-                   "java types [" + clazz.getCanonicalName() + "] and [" + importedPainlessType.getCanonicalName() + "]");
+           } else if (importedPainlessClass.equals(clazz) == false) {
+               throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " +
+                   "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]");
            } else if (importClassName == false) {
-               throw new IllegalArgumentException("inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]");
+               throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]");
            }
        }
    }

-   private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
-       PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName));
+   public void addPainlessConstructor(String targetCanonicalClassName, List<String> typeNameParameters) {
+       Objects.requireNonNull(targetCanonicalClassName);
+       Objects.requireNonNull(typeNameParameters);

-       if (ownerStruct == null) {
-           throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
-               "parameters " + whitelistConstructor.painlessParameterTypeNames);
+       Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
+
+       if (targetClass == null) {
+           throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" +
+               "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]");
        }

-       List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
-       Class<?>[] javaClassParameters = new Class<?>[whitelistConstructor.painlessParameterTypeNames.size()];
-
-       for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
-           String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);
+       List<Class<?>> typeParameters = new ArrayList<>(typeNameParameters.size());
+
+       for (String typeNameParameter : typeNameParameters) {
            try {
-               Class<?> painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName);
-
-               painlessParametersTypes.add(painlessParameterClass);
-               javaClassParameters[parameterCount] = PainlessLookupUtility.typeToJavaType(painlessParameterClass);
+               Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
+               typeParameters.add(typeParameter);
            } catch (IllegalArgumentException iae) {
-               throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
-                   "with owner struct [" + ownerStructName + "] and constructor parameters " +
-                   whitelistConstructor.painlessParameterTypeNames, iae);
+               throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " +
+                   "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]", iae);
            }
        }

-       java.lang.reflect.Constructor<?> javaConstructor;
+       addPainlessConstructor(targetClass, typeParameters);
+   }
+
+   public void addPainlessConstructor(Class<?> targetClass, List<Class<?>> typeParameters) {
+       Objects.requireNonNull(targetClass);
+       Objects.requireNonNull(typeParameters);
+
+       if (targetClass == def.class) {
+           throw new IllegalArgumentException("cannot add constructor to reserved class [" + DEF_CLASS_NAME + "]");
+       }
+
+       String targetCanonicalClassName = targetClass.getCanonicalName();
+       PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);
+
+       if (painlessClassBuilder == null) {
+           throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" +
+               "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
+       }
+
+       int typeParametersSize = typeParameters.size();
+       List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize);
+
+       for (Class<?> typeParameter : typeParameters) {
+           try {
+               validateType(typeParameter);
+           } catch (IllegalArgumentException iae) {
+               throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
+                   "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae);
+           }
+
+           javaTypeParameters.add(typeToJavaType(typeParameter));
+       }
+
+       Constructor<?> javaConstructor;

        try {
-           javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
-       } catch (NoSuchMethodException exception) {
-           throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
-               " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
+           javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
+       } catch (NoSuchMethodException nsme) {
+           throw new IllegalArgumentException("constructor reflection object " +
+               "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme);
        }

-       String painlessMethodKey = buildPainlessMethodKey("<init>", whitelistConstructor.painlessParameterTypeNames.size());
-       PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);
+       String painlessMethodKey = buildPainlessMethodKey(CONSTRUCTOR_NAME, typeParametersSize);
+       PainlessMethod painlessConstructor = painlessClassBuilder.constructors.get(painlessMethodKey);

        if (painlessConstructor == null) {
            org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
-           MethodHandle javaHandle;
+           MethodHandle methodHandle;

            try {
-               javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
-           } catch (IllegalAccessException exception) {
-               throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
-                   " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames);
+               methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor);
+           } catch (IllegalAccessException iae) {
+               throw new IllegalArgumentException("constructor method handle " +
+                   "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
            }

            painlessConstructor = painlessMethodCache.computeIfAbsent(
-               new PainlessMethodCacheKey(ownerStruct.clazz, "<init>", painlessParametersTypes),
-               key -> new PainlessMethod("<init>", ownerStruct.clazz, null, void.class, painlessParametersTypes,
-                   asmConstructor, javaConstructor.getModifiers(), javaHandle));
-           ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
-       } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){
-           throw new IllegalArgumentException(
-               "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
-               "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
+               new PainlessMethodCacheKey(targetClass, CONSTRUCTOR_NAME, typeParameters),
+               key -> new PainlessMethod(CONSTRUCTOR_NAME, targetClass, null, void.class, typeParameters,
+                   asmConstructor, javaConstructor.getModifiers(), methodHandle)
+           );
+
+           painlessClassBuilder.constructors.put(painlessMethodKey, painlessConstructor);
+       } else if (painlessConstructor.arguments.equals(typeParameters) == false){
+           throw new IllegalArgumentException("cannot have constructors " +
+               "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " +
+               "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.arguments) + "] " +
+               "with the same arity and different type parameters");
        }
    }

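The registration funnels every whitelisted constructor through `MethodHandles` so the compiler can invoke it later without going through reflection. A minimal standalone sketch of the `unreflectConstructor` step (using `StringBuilder` as an arbitrary target class; the real code caches the resulting handle inside a `PainlessMethod`):

```java
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;

public class ConstructorHandle {
    public static void main(String[] args) throws Throwable {
        // Resolve the reflective constructor, like targetClass.getConstructor(...) above.
        Constructor<?> ctor = StringBuilder.class.getConstructor(String.class);
        // Convert it to an invokable MethodHandle; an IllegalAccessException here is
        // what the builder reports as "constructor method handle ... not found".
        MethodHandle handle = MethodHandles.publicLookup().in(StringBuilder.class).unreflectConstructor(ctor);
        StringBuilder sb = (StringBuilder) handle.invoke("seeded");
        System.out.println(sb);
    }
}
```
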
private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
|
||||
PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName));
|
||||
public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName,
|
||||
String methodName, String returnCanonicalTypeName, List<String> typeNameParameters) {
|
||||
|
||||
if (ownerStruct == null) {
|
||||
throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
|
||||
"name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
|
||||
Objects.requireNonNull(classLoader);
|
||||
Objects.requireNonNull(targetCanonicalClassName);
|
||||
Objects.requireNonNull(methodName);
|
||||
Objects.requireNonNull(returnCanonicalTypeName);
|
||||
Objects.requireNonNull(typeNameParameters);
|
||||
|
||||
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
|
||||
|
||||
if (targetClass == null) {
|
||||
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]");
|
||||
}
|
||||
|
||||
if (METHOD_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
|
||||
throw new IllegalArgumentException("invalid method name" +
|
||||
" [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
|
||||
}
|
||||
Class<?> augmentedClass = null;
|
||||
|
||||
Class<?> javaAugmentedClass;
|
||||
|
||||
if (whitelistMethod.javaAugmentedClassName != null) {
|
||||
if (augmentedCanonicalClassName != null) {
|
||||
try {
|
||||
javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
|
||||
augmentedClass = Class.forName(augmentedCanonicalClassName, true, classLoader);
|
||||
} catch (ClassNotFoundException cnfe) {
|
||||
throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
|
||||
"not found for method with name [" + whitelistMethod.javaMethodName + "] " +
|
||||
"and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
|
||||
throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", cnfe);
|
||||
}
|
||||
} else {
|
||||
javaAugmentedClass = null;
|
||||
}
|
||||
|
||||
int augmentedOffset = javaAugmentedClass == null ? 0 : 1;
|
||||
|
||||
List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
|
||||
Class<?>[] javaClassParameters = new Class<?>[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];
|
||||
|
||||
if (javaAugmentedClass != null) {
|
||||
javaClassParameters[0] = ownerStruct.clazz;
|
||||
}
|
||||
|
||||
for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
|
||||
String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);
|
||||
List<Class<?>> typeParameters = new ArrayList<>(typeNameParameters.size());
|
||||
|
||||
for (String typeNameParameter : typeNameParameters) {
|
||||
try {
|
||||
Class<?> painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName);
|
||||
|
||||
painlessParametersTypes.add(painlessParameterClass);
|
||||
javaClassParameters[parameterCount + augmentedOffset] =
|
||||
PainlessLookupUtility.typeToJavaType(painlessParameterClass);
|
||||
Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
|
||||
typeParameters.add(typeParameter);
|
||||
} catch (IllegalArgumentException iae) {
|
||||
throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
|
||||
"with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
|
||||
"and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
|
||||
throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae);
|
||||
}
|
||||
}
|
||||
|
||||
Class<?> javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
|
||||
java.lang.reflect.Method javaMethod;
|
||||
Class<?> returnType;
|
||||
|
||||
try {
|
||||
javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
|
||||
returnType = canonicalTypeNameToType(returnCanonicalTypeName);
|
||||
} catch (IllegalArgumentException iae) {
|
||||
throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae);
|
||||
}
|
||||
|
||||
addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters);
|
||||
}
|
||||
|
||||
public void addPainlessMethod(Class<?> targetClass, Class<?> augmentedClass, String methodName,
        Class<?> returnType, List<Class<?>> typeParameters) {
    Objects.requireNonNull(targetClass);
    Objects.requireNonNull(methodName);
    Objects.requireNonNull(returnType);
    Objects.requireNonNull(typeParameters);

    if (targetClass == def.class) {
        throw new IllegalArgumentException("cannot add method to reserved class [" + DEF_CLASS_NAME + "]");
    }

    String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);

    if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) {
        throw new IllegalArgumentException(
            "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "].");
    }

    PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);

    if (painlessClassBuilder == null) {
        throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " +
            "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
    }

    int typeParametersSize = typeParameters.size();
    int augmentedParameterOffset = augmentedClass == null ? 0 : 1;
    List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize + augmentedParameterOffset);

    if (augmentedClass != null) {
        javaTypeParameters.add(targetClass);
    }

    for (Class<?> typeParameter : typeParameters) {
        try {
            validateType(typeParameter);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " +
                "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " +
                typesToCanonicalTypeNames(typeParameters) + "]", iae);
        }

        javaTypeParameters.add(typeToJavaType(typeParameter));
    }

    try {
        validateType(returnType);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " +
            "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae);
    }

    Method javaMethod;

    if (augmentedClass == null) {
        try {
            javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
        } catch (NoSuchMethodException nsme) {
            throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
                javaImplClass.getName() + "]", nsme);
        }

        Class<?> painlessReturnClass;

        try {
            painlessReturnClass = canonicalTypeNameToType(whitelistMethod.painlessReturnTypeName);
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
                "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
        }

        if (javaMethod.getReturnType() != PainlessLookupUtility.typeToJavaType(painlessReturnClass)) {
            throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
                "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
                "method with name [" + whitelistMethod.javaMethodName + "] " +
                "and parameters " + whitelistMethod.painlessParameterTypeNames);
        }

        String painlessMethodKey =
            buildPainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());

        if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
            PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);

            if (painlessMethod == null) {
                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
                MethodHandle javaMethodHandle;

                try {
                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
                } catch (IllegalAccessException exception) {
                    throw new IllegalArgumentException("method handle not found for method with name " +
                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
                }

                painlessMethod = painlessMethodCache.computeIfAbsent(
                    new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes),
                    key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, null, painlessReturnClass,
                        painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
                ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
                throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
                throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " +
                    "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme);
            }
        } else {
            PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);
            try {
                javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
            } catch (NoSuchMethodException nsme) {
                throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " +
                    "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " +
                    "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme);
            }
        }

        if (javaMethod.getReturnType() != typeToJavaType(returnType)) {
            throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " +
                "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " +
                "for method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " +
                typesToCanonicalTypeNames(typeParameters) + "]");
        }

        String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize);

    if (augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
        PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey);

        if (painlessMethod == null) {
            org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
            MethodHandle javaMethodHandle;

            try {
                javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
            } catch (IllegalAccessException exception) {
                throw new IllegalArgumentException("method handle not found for method with name " +
                    "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
                javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
            } catch (IllegalAccessException iae) {
                throw new IllegalArgumentException("static method handle [[" + targetClass.getCanonicalName() + "], " +
                    "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
            }

            painlessMethod = painlessMethodCache.computeIfAbsent(
                new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes),
                key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass,
                    painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
            ownerStruct.methods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
            painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
            throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
                "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
                "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
                "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
                new PainlessMethodCacheKey(targetClass, methodName, typeParameters),
                key -> new PainlessMethod(methodName, targetClass, null, returnType,
                    typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle));

            painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType &&
            painlessMethod.arguments.equals(typeParameters)) == false) {
            throw new IllegalArgumentException("cannot have static methods " +
                "[[" + targetCanonicalClassName + "], [" + methodName + "], " +
                "[" + typeToCanonicalTypeName(returnType) + "], " +
                typesToCanonicalTypeNames(typeParameters) + "] and " +
                "[[" + targetCanonicalClassName + "], [" + methodName + "], " +
                "[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " +
                typesToCanonicalTypeNames(painlessMethod.arguments) + "] " +
                "with the same arity and different return type or type parameters");
        }
    } else {
        PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey);

        if (painlessMethod == null) {
            org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
            MethodHandle javaMethodHandle;

            if (augmentedClass == null) {
                try {
                    javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
                } catch (IllegalAccessException iae) {
                    throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " +
                        "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
                }
            } else {
                try {
                    javaMethodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod);
                } catch (IllegalAccessException iae) {
                    throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " +
                        "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " +
                        "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae);
                }
            }

            painlessMethod = painlessMethodCache.computeIfAbsent(
                new PainlessMethodCacheKey(targetClass, methodName, typeParameters),
                key -> new PainlessMethod(methodName, targetClass, augmentedClass, returnType,
                    typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle));

            painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod);
        } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType &&
            painlessMethod.arguments.equals(typeParameters)) == false) {
            throw new IllegalArgumentException("cannot have methods " +
                "[[" + targetCanonicalClassName + "], [" + methodName + "], " +
                "[" + typeToCanonicalTypeName(returnType) + "], " +
                typesToCanonicalTypeNames(typeParameters) + "] and " +
                "[[" + targetCanonicalClassName + "], [" + methodName + "], " +
                "[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " +
                typesToCanonicalTypeNames(painlessMethod.arguments) + "] " +
                "with the same arity and different return type or type parameters");
        }
    }
}

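Aside on the dedup check above: addPainlessMethod keys each method by name and arity and rejects a second registration that differs in return type or type parameters. A minimal illustrative sketch of such a name/arity key follows; the exact format produced by buildPainlessMethodKey is an assumption here, not taken from this change.

// Hypothetical sketch of a name/arity method key in the spirit of buildPainlessMethodKey.
static String methodKey(String methodName, int arity) {
    return methodName + "/" + arity;
}
// e.g. methodKey("equals", 1) -> "equals/1"; two whitelisted methods that collide on
// this key must agree on return type and type parameters, otherwise the builder throws
// the "cannot have methods ... with the same arity" error above.
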
private void addField(String ownerStructName, WhitelistField whitelistField) {
    PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName));
public void addPainlessField(String targetCanonicalClassName, String fieldName, String typeNameParameter) {
    Objects.requireNonNull(targetCanonicalClassName);
    Objects.requireNonNull(fieldName);
    Objects.requireNonNull(typeNameParameter);

    if (ownerStruct == null) {
        throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
            "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
    Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);

    if (targetClass == null) {
        throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
    }

    if (FIELD_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
        throw new IllegalArgumentException("invalid field name " +
            "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "].");
    }

    java.lang.reflect.Field javaField;
    Class<?> typeParameter;

    try {
        javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
    } catch (NoSuchFieldException exception) {
        throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
            "not found for class [" + ownerStruct.clazz.getName() + "].");
    }

    Class<?> painlessFieldClass;

    try {
        painlessFieldClass = canonicalTypeNameToType(whitelistField.painlessFieldTypeName);
        typeParameter = canonicalTypeNameToType(typeNameParameter);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " +
            "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
        throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " +
            "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]");
    }

    addPainlessField(targetClass, fieldName, typeParameter);
}

public void addPainlessField(Class<?> targetClass, String fieldName, Class<?> typeParameter) {
    Objects.requireNonNull(targetClass);
    Objects.requireNonNull(fieldName);
    Objects.requireNonNull(typeParameter);

    if (targetClass == def.class) {
        throw new IllegalArgumentException("cannot add field to reserved class [" + DEF_CLASS_NAME + "]");
    }

    String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);

    if (FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) {
        throw new IllegalArgumentException(
            "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "].");
    }

    PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);

    if (painlessClassBuilder == null) {
        throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
    }

    try {
        validateType(typeParameter);
    } catch (IllegalArgumentException iae) {
        throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " +
            "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", iae);
    }

    Field javaField;

    try {
        javaField = targetClass.getField(fieldName);
    } catch (NoSuchFieldException nsme) {
        throw new IllegalArgumentException(
            "field reflection object [[" + targetCanonicalClassName + "], [" + fieldName + "] not found", nsme);
    }

    if (javaField.getType() != typeToJavaType(typeParameter)) {
        throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaField.getType()) + "] " +
            "does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " +
            "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]");
    }

    String painlessFieldKey = buildPainlessFieldKey(fieldName);

    if (Modifier.isStatic(javaField.getModifiers())) {
        if (Modifier.isFinal(javaField.getModifiers()) == false) {
            throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
                "with owner struct [" + ownerStruct.name + "] is not final");
            throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "]. [" + fieldName + "]] must be final");
        }

        PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);
        PainlessField painlessField = painlessClassBuilder.staticMembers.get(painlessFieldKey);

        if (painlessField == null) {
            painlessField = painlessFieldCache.computeIfAbsent(
                new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass),
                key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
                    ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null));
            ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
        } else if (painlessField.clazz != painlessFieldClass) {
            throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
                "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
                new PainlessFieldCacheKey(targetClass, fieldName, typeParameter),
                key -> new PainlessField(fieldName, javaField.getName(), targetClass,
                    typeParameter, javaField.getModifiers(), null, null));

            painlessClassBuilder.staticMembers.put(painlessFieldKey, painlessField);
        } else if (painlessField.clazz != typeParameter) {
            throw new IllegalArgumentException("cannot have static fields " +
                "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
                typeToCanonicalTypeName(typeParameter) + "] and " +
                "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " +
                typeToCanonicalTypeName(painlessField.clazz) + "] " +
                "with the same and different type parameters");
        }
    } else {
        MethodHandle javaMethodHandleGetter;
        MethodHandle javaMethodHandleSetter;
        MethodHandle methodHandleGetter;

        try {
            if (Modifier.isStatic(javaField.getModifiers()) == false) {
                javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
                javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
            } else {
                javaMethodHandleGetter = null;
                javaMethodHandleSetter = null;
            }
        } catch (IllegalAccessException exception) {
            throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
                " not found for class [" + ownerStruct.clazz.getName() + "].");
            methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
        } catch (IllegalAccessException iae) {
            throw new IllegalArgumentException(
                "method handle getter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]");
        }

        PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);
        MethodHandle methodHandleSetter;

        try {
            methodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
        } catch (IllegalAccessException iae) {
            throw new IllegalArgumentException(
                "method handle setter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]");
        }

        PainlessField painlessField = painlessClassBuilder.members.get(painlessFieldKey);

        if (painlessField == null) {
            painlessField = painlessFieldCache.computeIfAbsent(
                new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass),
                key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
                    ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));
            ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
        } else if (painlessField.clazz != painlessFieldClass) {
            throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
                "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
                new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter),
                key -> new PainlessField(fieldName, javaField.getName(), targetClass,
                    typeParameter, javaField.getModifiers(), methodHandleGetter, methodHandleSetter));

            painlessClassBuilder.members.put(fieldName, painlessField);
        } else if (painlessField.clazz != typeParameter) {
            throw new IllegalArgumentException("cannot have fields " +
                "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
                typeToCanonicalTypeName(typeParameter) + "] and " +
                "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " +
                typeToCanonicalTypeName(painlessField.clazz) + "] " +
                "with the same and different type parameters");
        }
    }
}

private void copyStruct(String struct, List<String> children) {
    final PainlessClassBuilder owner = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(struct));
    final PainlessClassBuilder owner = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(struct));

    if (owner == null) {
        throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");

@ -516,7 +675,7 @@ public class PainlessLookupBuilder {
    for (int count = 0; count < children.size(); ++count) {
        final PainlessClassBuilder child =
            classesToPainlessClasses.get(canonicalClassNamesToClasses.get(children.get(count)));
            classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(children.get(count)));

        if (child == null) {
            throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +

@ -690,7 +849,7 @@ public class PainlessLookupBuilder {
    for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
        String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
        PainlessClassBuilder painlessStruct =
            classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName));
            classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName));

        if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
            throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +

@ -701,8 +860,8 @@ public class PainlessLookupBuilder {
        addPainlessClass(
            whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false);

        painlessStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName));
        classesToPainlessClasses.put(painlessStruct.clazz, painlessStruct);
        painlessStruct = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName));
        classesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct);
    }
}

@ -715,17 +874,19 @@ public class PainlessLookupBuilder {

    for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
        origin = whitelistConstructor.origin;
        addConstructor(painlessTypeName, whitelistConstructor);
        addPainlessConstructor(painlessTypeName, whitelistConstructor.painlessParameterTypeNames);
    }

    for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
        origin = whitelistMethod.origin;
        addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
        addPainlessMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod.javaAugmentedClassName,
            whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName,
            whitelistMethod.painlessParameterTypeNames);
    }

    for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
        origin = whitelistField.origin;
        addField(painlessTypeName, whitelistField);
        addPainlessField(painlessTypeName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName);
    }
}
}

@ -735,8 +896,8 @@ public class PainlessLookupBuilder {

    // goes through each Painless struct and determines the inheritance list,
    // and then adds all inherited types to the Painless struct's whitelist
    for (Class<?> javaClass : classesToPainlessClasses.keySet()) {
        PainlessClassBuilder painlessStruct = classesToPainlessClasses.get(javaClass);
    for (Class<?> javaClass : classesToPainlessClassBuilders.keySet()) {
        PainlessClassBuilder painlessStruct = classesToPainlessClassBuilders.get(javaClass);

        List<String> painlessSuperStructs = new ArrayList<>();
        Class<?> javaSuperClass = painlessStruct.clazz.getSuperclass();

@ -747,7 +908,7 @@ public class PainlessLookupBuilder {
        // adds super classes to the inheritance list
        if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
            while (javaSuperClass != null) {
                PainlessClassBuilder painlessSuperStruct = classesToPainlessClasses.get(javaSuperClass);
                PainlessClassBuilder painlessSuperStruct = classesToPainlessClassBuilders.get(javaSuperClass);

                if (painlessSuperStruct != null) {
                    painlessSuperStructs.add(painlessSuperStruct.name);

@ -763,7 +924,7 @@ public class PainlessLookupBuilder {
            Class<?> javaInterfaceLookup = javaInteraceLookups.pop();

            for (Class<?> javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
                PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClasses.get(javaSuperInterface);
                PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClassBuilders.get(javaSuperInterface);

                if (painlessInterfaceStruct != null) {
                    String painlessInterfaceStructName = painlessInterfaceStruct.name;

@ -784,7 +945,7 @@ public class PainlessLookupBuilder {

    // copies methods and fields from Object into interface types
    if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
        PainlessClassBuilder painlessObjectStruct = classesToPainlessClasses.get(Object.class);
        PainlessClassBuilder painlessObjectStruct = classesToPainlessClassBuilders.get(Object.class);

        if (painlessObjectStruct != null) {
            copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));

@ -793,14 +954,14 @@ public class PainlessLookupBuilder {
    }

    // precompute runtime classes
    for (PainlessClassBuilder painlessStruct : classesToPainlessClasses.values()) {
    for (PainlessClassBuilder painlessStruct : classesToPainlessClassBuilders.values()) {
        addRuntimeClass(painlessStruct);
    }

    Map<Class<?>, PainlessClass> javaClassesToPainlessClasses = new HashMap<>();

    // copy all structs to make them unmodifiable for outside users:
    for (Map.Entry<Class<?>,PainlessClassBuilder> entry : classesToPainlessClasses.entrySet()) {
    for (Map.Entry<Class<?>,PainlessClassBuilder> entry : classesToPainlessClassBuilders.entrySet()) {
        entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue());
        javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build());
    }

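The inheritance pass above walks every struct's superclasses and then its interfaces before copying inherited members. A self-contained sketch of that traversal idea, written against plain reflection rather than the builder's internal maps (method and variable names here are illustrative only):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashSet;
import java.util.Set;

// Collects all supertypes (superclasses and transitively implemented interfaces)
// of a class, in the same spirit as the whitelist inheritance walk above.
static Set<Class<?>> collectSupertypes(Class<?> clazz) {
    Set<Class<?>> supertypes = new LinkedHashSet<>();
    // walk the superclass chain first, like the while loop over getSuperclass()
    for (Class<?> superClass = clazz.getSuperclass(); superClass != null; superClass = superClass.getSuperclass()) {
        supertypes.add(superClass);
    }
    // then pop interfaces off a work stack, like the javaInteraceLookups stack
    Deque<Class<?>> interfaceLookups = new ArrayDeque<>();
    interfaceLookups.push(clazz);
    while (interfaceLookups.isEmpty() == false) {
        Class<?> lookup = interfaceLookups.pop();
        for (Class<?> superInterface : lookup.getInterfaces()) {
            if (supertypes.add(superInterface)) {
                interfaceLookups.push(superInterface);
            }
        }
    }
    return supertypes;
}
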
@ -36,7 +36,9 @@ import java.util.Objects;
 * classes to be represented. The set of available classes will always be a subset of the available types.
 *
 * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. If the variable value is the same for asm,
 * java, and painless, no prefix is used.
 * java, and painless, no prefix is used. Target is used as a prefix to represent if a constructor, method, or field is being
 * called/accessed on that specific class. Parameter is often a postfix used to represent if a type is used as a parameter to a
 * constructor, method, or field.
 *
 * <ul>
 *     <li> - javaClassName (String) - the fully qualified java class name where '$' tokens represent inner classes excluding

@ -150,8 +152,8 @@ public final class PainlessLookupUtility {

    String canonicalTypeName = type.getCanonicalName();

    if (canonicalTypeName.startsWith(def.class.getName())) {
        canonicalTypeName = canonicalTypeName.replace(def.class.getName(), DEF_TYPE_NAME);
    if (canonicalTypeName.startsWith(def.class.getCanonicalName())) {
        canonicalTypeName = canonicalTypeName.replace(def.class.getCanonicalName(), DEF_CLASS_NAME);
    }

    return canonicalTypeName;

@ -351,7 +353,7 @@ public final class PainlessLookupUtility {
    /**
     * The def type name as specified in the source for a script.
     */
    public static final String DEF_TYPE_NAME = "def";
    public static final String DEF_CLASS_NAME = "def";

    /**
     * The method name for all constructors.

@ -148,7 +148,7 @@ class java.lang.Character {
  int MAX_RADIX
  char MAX_SURROGATE
  char MAX_VALUE
  char MIN_CODE_POINT
  int MIN_CODE_POINT
  char MIN_HIGH_SURROGATE
  char MIN_LOW_SURROGATE
  int MIN_RADIX

@ -126,8 +126,6 @@ public class DiscountedCumulativeGain implements EvaluationMetric {
    @Override
    public EvalQueryQuality evaluate(String taskId, SearchHit[] hits,
            List<RatedDocument> ratedDocs) {
        List<Integer> allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed()
            .collect(Collectors.toList());
        List<RatedSearchHit> ratedHits = joinHitsWithRatings(hits, ratedDocs);
        List<Integer> ratingsInSearchHits = new ArrayList<>(ratedHits.size());
        int unratedResults = 0;

@ -144,6 +142,8 @@ public class DiscountedCumulativeGain implements EvaluationMetric {
        double idcg = 0;

        if (normalize) {
            List<Integer> allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed()
                .collect(Collectors.toList());
            Collections.sort(allRatings, Comparator.nullsLast(Collections.reverseOrder()));
            idcg = computeDCG(allRatings.subList(0, Math.min(ratingsInSearchHits.size(), allRatings.size())));
            if (idcg != 0) {

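For context on the normalize branch above: normalized DCG divides the DCG of the actual ranking by the ideal DCG (idcg) of the same ratings sorted in descending order. A minimal sketch under the common exponential-gain formula follows; the exact formula inside computeDCG is an assumption here.

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

// Illustrative nDCG helper: gain (2^rating - 1) discounted by log2(rank + 1).
static double dcg(List<Integer> ratings) {
    double sum = 0;
    for (int rank = 1; rank <= ratings.size(); rank++) {
        sum += (Math.pow(2, ratings.get(rank - 1)) - 1) / (Math.log(rank + 1) / Math.log(2));
    }
    return sum;
}

static double normalizedDcg(List<Integer> ratingsInRankedOrder) {
    List<Integer> ideal = ratingsInRankedOrder.stream()
            .sorted(Comparator.reverseOrder())
            .collect(Collectors.toList());
    double idcg = dcg(ideal);
    return idcg != 0 ? dcg(ratingsInRankedOrder) / idcg : 0.0;
}
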
@ -41,19 +41,19 @@ import java.util.Objects;
public class EvalQueryQuality implements ToXContentFragment, Writeable {

    private final String queryId;
    private final double evaluationResult;
    private final double metricScore;
    private MetricDetail optionalMetricDetails;
    private final List<RatedSearchHit> ratedHits;

    public EvalQueryQuality(String id, double evaluationResult) {
    public EvalQueryQuality(String id, double metricScore) {
        this.queryId = id;
        this.evaluationResult = evaluationResult;
        this.metricScore = metricScore;
        this.ratedHits = new ArrayList<>();
    }

    public EvalQueryQuality(StreamInput in) throws IOException {
        this.queryId = in.readString();
        this.evaluationResult = in.readDouble();
        this.metricScore = in.readDouble();
        this.ratedHits = in.readList(RatedSearchHit::new);
        this.optionalMetricDetails = in.readOptionalNamedWriteable(MetricDetail.class);
    }

@ -61,7 +61,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
    // only used for parsing internally
    private EvalQueryQuality(String queryId, ParsedEvalQueryQuality builder) {
        this.queryId = queryId;
        this.evaluationResult = builder.evaluationResult;
        this.metricScore = builder.evaluationResult;
        this.optionalMetricDetails = builder.optionalMetricDetails;
        this.ratedHits = builder.ratedHits;
    }

@ -69,7 +69,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(queryId);
        out.writeDouble(evaluationResult);
        out.writeDouble(metricScore);
        out.writeList(ratedHits);
        out.writeOptionalNamedWriteable(this.optionalMetricDetails);
    }

@ -78,8 +78,8 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
        return queryId;
    }

    public double getQualityLevel() {
        return evaluationResult;
    public double metricScore() {
        return metricScore;
    }

    public void setMetricDetails(MetricDetail breakdown) {

@ -101,7 +101,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(queryId);
        builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult);
        builder.field(METRIC_SCORE_FIELD.getPreferredName(), this.metricScore);
        builder.startArray(UNRATED_DOCS_FIELD.getPreferredName());
        for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) {
            builder.startObject();

@ -122,7 +122,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
        return builder;
    }

    private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level");
    static final ParseField METRIC_SCORE_FIELD = new ParseField("metric_score");
    private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs");
    private static final ParseField HITS_FIELD = new ParseField("hits");
    private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details");

@ -136,7 +136,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
    }

    static {
        PARSER.declareDouble((obj, value) -> obj.evaluationResult = value, QUALITY_LEVEL_FIELD);
        PARSER.declareDouble((obj, value) -> obj.evaluationResult = value, METRIC_SCORE_FIELD);
        PARSER.declareObject((obj, value) -> obj.optionalMetricDetails = value, (p, c) -> parseMetricDetail(p),
            METRIC_DETAILS_FIELD);
        PARSER.declareObjectArray((obj, list) -> obj.ratedHits = list, (p, c) -> RatedSearchHit.parse(p), HITS_FIELD);

@ -164,13 +164,13 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
        }
        EvalQueryQuality other = (EvalQueryQuality) obj;
        return Objects.equals(queryId, other.queryId) &&
            Objects.equals(evaluationResult, other.evaluationResult) &&
            Objects.equals(metricScore, other.metricScore) &&
            Objects.equals(ratedHits, other.ratedHits) &&
            Objects.equals(optionalMetricDetails, other.optionalMetricDetails);
    }

    @Override
    public final int hashCode() {
        return Objects.hash(queryId, evaluationResult, ratedHits, optionalMetricDetails);
        return Objects.hash(queryId, metricScore, ratedHits, optionalMetricDetails);
    }
}

@ -39,23 +39,22 @@ import java.util.stream.Collectors;
public interface EvaluationMetric extends ToXContentObject, NamedWriteable {

    /**
     * Returns a single metric representing the ranking quality of a set of returned
     * documents wrt. to a set of document ids labeled as relevant for this search.
     * Evaluates a single ranking evaluation case.
     *
     * @param taskId
     *            the id of the query for which the ranking is currently evaluated
     *            an identifier of the query for which the search ranking is
     *            evaluated
     * @param hits
     *            the result hits as returned by a search request
     *            the search result hits
     * @param ratedDocs
     *            the documents that were ranked by human annotators for this query
     *            case
     * @return some metric representing the quality of the result hit list wrt. to
     *         relevant doc ids.
     *            the documents that contain the document rating for this query case
     * @return an {@link EvalQueryQuality} instance that contains the metric score
     *         with respect to the provided search hits and ratings
     */
    EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List<RatedDocument> ratedDocs);

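A brief usage sketch of the evaluate contract with the renamed accessor; the hits array and rated documents are assumed to come from a search response and from human relevance judgments, and the wrapping method name is invented for illustration:

// Minimal sketch of consuming the (renamed) evaluate contract.
static double scoreQueryCase(String queryId, SearchHit[] hits, List<RatedDocument> ratedDocs) {
    EvaluationMetric metric = new PrecisionAtK();
    EvalQueryQuality quality = metric.evaluate(queryId, hits, ratedDocs);
    return quality.metricScore(); // was getQualityLevel() before this change
}
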
    /**
     * join hits with rated documents using the joint _index/_id document key
     * Joins hits with rated documents using the joint _index/_id document key.
     */
    static List<RatedSearchHit> joinHitsWithRatings(SearchHit[] hits, List<RatedDocument> ratedDocs) {
        Map<DocumentKey, RatedDocument> ratedDocumentMap = ratedDocs.stream()

@ -74,7 +73,7 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
    }

    /**
     * filter @link {@link RatedSearchHit} that don't have a rating
     * Filter {@link RatedSearchHit}s that do not have a rating.
     */
    static List<DocumentKey> filterUnratedDocuments(List<RatedSearchHit> ratedHits) {
        return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)

@ -82,11 +81,11 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
    }

    /**
     * how evaluation metrics for particular search queries get combined for the overall evaluation score.
     * Defaults to averaging over the partial results.
     * Combine several {@link EvalQueryQuality} results into the overall evaluation score.
     * This defaults to averaging over the partial results, but can be overwritten to obtain a different behavior.
     */
    default double combine(Collection<EvalQueryQuality> partialResults) {
        return partialResults.stream().mapToDouble(EvalQueryQuality::getQualityLevel).sum() / partialResults.size();
        return partialResults.stream().mapToDouble(EvalQueryQuality::metricScore).sum() / partialResults.size();
    }

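Because combine is a default method, an individual metric may override it; a hypothetical variant that reports the worst query case instead of the average could look like this (purely illustrative, not part of this change):

// Hypothetical override of the default combine: take the minimum partial score.
@Override
public double combine(Collection<EvalQueryQuality> partialResults) {
    return partialResults.stream()
            .mapToDouble(EvalQueryQuality::metricScore)
            .min()
            .orElse(0.0);
}
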
    /**

@ -110,8 +110,7 @@ public class MeanReciprocalRank implements EvaluationMetric {
     * Compute ReciprocalRank based on provided relevant document IDs.
     **/
    @Override
    public EvalQueryQuality evaluate(String taskId, SearchHit[] hits,
            List<RatedDocument> ratedDocs) {
    public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List<RatedDocument> ratedDocs) {
        List<RatedSearchHit> ratedHits = joinHitsWithRatings(hits, ratedDocs);
        int firstRelevant = -1;
        int rank = 1;

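The loop started above records the rank of the first relevant hit; a standalone sketch of the same reciprocal-rank idea, with the relevance-threshold handling simplified for illustration:

// Standalone sketch of reciprocal rank: 1 / rank of the first hit rated at or
// above the relevance threshold, or 0.0 if no relevant hit was returned.
static double reciprocalRank(int[] ratingsInRankedOrder, int relevantRatingThreshold) {
    for (int rank = 1; rank <= ratingsInRankedOrder.length; rank++) {
        if (ratingsInRankedOrder[rank - 1] >= relevantRatingThreshold) {
            return 1.0 / rank;
        }
    }
    return 0.0;
}
// e.g. ratings {0, 0, 1} with threshold 1 give 1.0 / 3, matching the
// "(double) 1 / 3" expectation in MeanReciprocalRankTests further down.
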
@ -48,15 +48,15 @@ import java.util.stream.Collectors;
public class RankEvalResponse extends ActionResponse implements ToXContentObject {

    /** The overall evaluation result. */
    private double evaluationResult;
    private double metricScore;
    /** details about individual ranking evaluation queries, keyed by their id */
    private Map<String, EvalQueryQuality> details;
    /** exceptions for specific ranking evaluation queries, keyed by their id */
    private Map<String, Exception> failures;

    public RankEvalResponse(double qualityLevel, Map<String, EvalQueryQuality> partialResults,
    public RankEvalResponse(double metricScore, Map<String, EvalQueryQuality> partialResults,
            Map<String, Exception> failures) {
        this.evaluationResult = qualityLevel;
        this.metricScore = metricScore;
        this.details = new HashMap<>(partialResults);
        this.failures = new HashMap<>(failures);
    }

@ -65,8 +65,8 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject
        // only used in RankEvalAction#newResponse()
    }

    public double getEvaluationResult() {
        return evaluationResult;
    public double getMetricScore() {
        return metricScore;
    }

    public Map<String, EvalQueryQuality> getPartialResults() {

@ -85,7 +85,7 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeDouble(evaluationResult);
        out.writeDouble(metricScore);
        out.writeVInt(details.size());
        for (String queryId : details.keySet()) {
            out.writeString(queryId);

@ -101,7 +101,7 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        this.evaluationResult = in.readDouble();
        this.metricScore = in.readDouble();
        int partialResultSize = in.readVInt();
        this.details = new HashMap<>(partialResultSize);
        for (int i = 0; i < partialResultSize; i++) {

@ -120,7 +120,7 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field("quality_level", evaluationResult);
        builder.field("metric_score", metricScore);
        builder.startObject("details");
        for (String key : details.keySet()) {
            details.get(key).toXContent(builder, params);

@ -137,7 +137,6 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject
        return builder;
    }

    private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level");
    private static final ParseField DETAILS_FIELD = new ParseField("details");
    private static final ParseField FAILURES_FIELD = new ParseField("failures");
    @SuppressWarnings("unchecked")

@ -147,7 +146,7 @@ public class RankEvalResponse extends ActionResponse implements ToXContentObject
        ((List<EvalQueryQuality>) a[1]).stream().collect(Collectors.toMap(EvalQueryQuality::getId, Function.identity())),
        ((List<Tuple<String, Exception>>) a[2]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2))));
    static {
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), QUALITY_LEVEL_FIELD);
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), EvalQueryQuality.METRIC_SCORE_FIELD);
        PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> EvalQueryQuality.fromXContent(p, n),
            DETAILS_FIELD);
        PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(), (p, c, n) -> {

@ -76,7 +76,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
        hits[i].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null));
    }
    DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
    assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).getQualityLevel(), DELTA);
    assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA);

    /**
     * Check with normalization: to get the maximal possible dcg, sort documents by

@ -94,7 +94,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
     * idcg = 14.595390756454922 (sum of last column)
     */
    dcg = new DiscountedCumulativeGain(true, null, 10);
    assertEquals(EXPECTED_NDCG, dcg.evaluate("id", hits, rated).getQualityLevel(), DELTA);
    assertEquals(EXPECTED_NDCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA);
}

/**

@ -127,7 +127,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
    }
    DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
    EvalQueryQuality result = dcg.evaluate("id", hits, rated);
    assertEquals(12.779642067948913, result.getQualityLevel(), DELTA);
    assertEquals(12.779642067948913, result.metricScore(), DELTA);
    assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size());

    /**

@ -146,7 +146,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
     * idcg = 13.347184833073591 (sum of last column)
     */
    dcg = new DiscountedCumulativeGain(true, null, 10);
    assertEquals(12.779642067948913 / 13.347184833073591, dcg.evaluate("id", hits, rated).getQualityLevel(), DELTA);
    assertEquals(12.779642067948913 / 13.347184833073591, dcg.evaluate("id", hits, rated).metricScore(), DELTA);
}

/**

@ -184,7 +184,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
    }
    DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
    EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
    assertEquals(12.392789260714371, result.getQualityLevel(), DELTA);
    assertEquals(12.392789260714371, result.metricScore(), DELTA);
    assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size());

    /**

@ -204,7 +204,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
     * idcg = 13.347184833073591 (sum of last column)
     */
    dcg = new DiscountedCumulativeGain(true, null, 10);
    assertEquals(12.392789260714371 / 13.347184833073591, dcg.evaluate("id", hits, ratedDocs).getQualityLevel(), DELTA);
    assertEquals(12.392789260714371 / 13.347184833073591, dcg.evaluate("id", hits, ratedDocs).metricScore(), DELTA);
}

/**

@ -223,13 +223,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
    SearchHit[] hits = new SearchHit[0];
    DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
    EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
    assertEquals(0.0d, result.getQualityLevel(), DELTA);
    assertEquals(0.0d, result.metricScore(), DELTA);
    assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());

    // also check normalized
    dcg = new DiscountedCumulativeGain(true, null, 10);
    result = dcg.evaluate("id", hits, ratedDocs);
    assertEquals(0.0d, result.getQualityLevel(), DELTA);
    assertEquals(0.0d, result.metricScore(), DELTA);
    assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());
}

@ -129,7 +129,7 @@ public class EvalQueryQualityTests extends ESTestCase {

    private static EvalQueryQuality mutateTestItem(EvalQueryQuality original) {
        String id = original.getId();
        double qualityLevel = original.getQualityLevel();
        double metricScore = original.metricScore();
        List<RatedSearchHit> ratedHits = new ArrayList<>(original.getHitsAndRatings());
        MetricDetail metricDetails = original.getMetricDetails();
        switch (randomIntBetween(0, 3)) {

@ -137,7 +137,7 @@ public class EvalQueryQualityTests extends ESTestCase {
            id = id + "_";
            break;
        case 1:
            qualityLevel = qualityLevel + 0.1;
            metricScore = metricScore + 0.1;
            break;
        case 2:
            if (metricDetails == null) {

@ -152,7 +152,7 @@ public class EvalQueryQualityTests extends ESTestCase {
        default:
            throw new IllegalStateException("The test should only allow four parameters mutated");
        }
        EvalQueryQuality evalQueryQuality = new EvalQueryQuality(id, qualityLevel);
        EvalQueryQuality evalQueryQuality = new EvalQueryQuality(id, metricScore);
        evalQueryQuality.setMetricDetails(metricDetails);
        evalQueryQuality.addHitsAndRatings(ratedHits);
        return evalQueryQuality;

@ -76,10 +76,10 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
        Integer[] relevanceRatings = new Integer[] { 3, 2, 0, 1};
        SearchHit[] hits = createSearchHits(rated, relevanceRatings);
        ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, 0, 3);
        assertEquals(0.8984375, err.evaluate("id", hits, rated).getQualityLevel(), DELTA);
        assertEquals(0.8984375, err.evaluate("id", hits, rated).metricScore(), DELTA);
        // take 4th rank into window
        err = new ExpectedReciprocalRank(3, 0, 4);
        assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).getQualityLevel(), DELTA);
        assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).metricScore(), DELTA);
    }

    /**

@ -102,11 +102,11 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
        SearchHit[] hits = createSearchHits(rated, relevanceRatings);
        ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, null, 4);
        EvalQueryQuality evaluation = err.evaluate("id", hits, rated);
        assertEquals(0.875 + 0.00390625, evaluation.getQualityLevel(), DELTA);
        assertEquals(0.875 + 0.00390625, evaluation.metricScore(), DELTA);
        assertEquals(1, ((ExpectedReciprocalRank.Detail) evaluation.getMetricDetails()).getUnratedDocs());
        // if we supply e.g. 2 as unknown docs rating, it should be the same as in the other test above
        err = new ExpectedReciprocalRank(3, 2, 4);
        assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).getQualityLevel(), DELTA);
        assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).metricScore(), DELTA);
    }

    private SearchHit[] createSearchHits(List<RatedDocument> rated, Integer[] relevanceRatings) {

@ -126,7 +126,7 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
     */
    public void testNoResults() throws Exception {
        ExpectedReciprocalRank err = new ExpectedReciprocalRank(5, 0, 10);
        assertEquals(0.0, err.evaluate("id", new SearchHit[0], Collections.emptyList()).getQualityLevel(), DELTA);
        assertEquals(0.0, err.evaluate("id", new SearchHit[0], Collections.emptyList()).metricScore(), DELTA);
    }

    public void testParseFromXContent() throws IOException {

@ -95,14 +95,14 @@ public class MeanReciprocalRankTests extends ESTestCase {

        int rankAtFirstRelevant = relevantAt + 1;
        EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs);
        assertEquals(1.0 / rankAtFirstRelevant, evaluation.getQualityLevel(), Double.MIN_VALUE);
        assertEquals(1.0 / rankAtFirstRelevant, evaluation.metricScore(), Double.MIN_VALUE);
        assertEquals(rankAtFirstRelevant, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank());

        // check that if we have fewer search hits than relevant doc position,
        // we don't find any result and get 0.0 quality level
        // we don't find any result and get 0.0 score
        reciprocalRank = new MeanReciprocalRank();
        evaluation = reciprocalRank.evaluate("id", Arrays.copyOfRange(hits, 0, relevantAt), ratedDocs);
        assertEquals(0.0, evaluation.getQualityLevel(), Double.MIN_VALUE);
        assertEquals(0.0, evaluation.metricScore(), Double.MIN_VALUE);
    }

    public void testEvaluationOneRelevantInResults() {

@ -120,7 +120,7 @@ public class MeanReciprocalRankTests extends ESTestCase {
        }

        EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs);
        assertEquals(1.0 / (relevantAt + 1), evaluation.getQualityLevel(), Double.MIN_VALUE);
        assertEquals(1.0 / (relevantAt + 1), evaluation.metricScore(), Double.MIN_VALUE);
        assertEquals(relevantAt + 1, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank());
    }

@ -140,7 +140,7 @@ public class MeanReciprocalRankTests extends ESTestCase {

        MeanReciprocalRank reciprocalRank = new MeanReciprocalRank(2, 10);
        EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, rated);
        assertEquals((double) 1 / 3, evaluation.getQualityLevel(), 0.00001);
        assertEquals((double) 1 / 3, evaluation.metricScore(), 0.00001);
        assertEquals(3, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank());
    }

@ -158,13 +158,13 @@ public class MeanReciprocalRankTests extends ESTestCase {
        SearchHit[] hits = createSearchHits(0, 9, "test");
        List<RatedDocument> ratedDocs = new ArrayList<>();
        EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs);
        assertEquals(0.0, evaluation.getQualityLevel(), Double.MIN_VALUE);
        assertEquals(0.0, evaluation.metricScore(), Double.MIN_VALUE);
    }

    public void testNoResults() throws Exception {
        SearchHit[] hits = new SearchHit[0];
        EvalQueryQuality evaluated = (new MeanReciprocalRank()).evaluate("id", hits, Collections.emptyList());
        assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001);
        assertEquals(0.0d, evaluated.metricScore(), 0.00001);
        assertEquals(-1, ((MeanReciprocalRank.Detail) evaluated.getMetricDetails()).getFirstRelevantRank());
    }

@ -53,7 +53,7 @@ public class PrecisionAtKTests extends ESTestCase {
List<RatedDocument> rated = new ArrayList<>();
rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1));
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated);
assertEquals(1, evaluated.getQualityLevel(), 0.00001);
assertEquals(1, evaluated.metricScore(), 0.00001);
assertEquals(1, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(1, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

@ -66,7 +66,7 @@ public class PrecisionAtKTests extends ESTestCase {
rated.add(createRatedDoc("test", "3", RELEVANT_RATING_1));
rated.add(createRatedDoc("test", "4", IRRELEVANT_RATING_0));
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated);
assertEquals((double) 4 / 5, evaluated.getQualityLevel(), 0.00001);
assertEquals((double) 4 / 5, evaluated.metricScore(), 0.00001);
assertEquals(4, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

@ -85,7 +85,7 @@ public class PrecisionAtKTests extends ESTestCase {
rated.add(createRatedDoc("test", "4", 4));
PrecisionAtK precisionAtN = new PrecisionAtK(2, false, 5);
EvalQueryQuality evaluated = precisionAtN.evaluate("id", toSearchHits(rated, "test"), rated);
assertEquals((double) 3 / 5, evaluated.getQualityLevel(), 0.00001);
assertEquals((double) 3 / 5, evaluated.metricScore(), 0.00001);
assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

@ -99,7 +99,7 @@ public class PrecisionAtKTests extends ESTestCase {
rated.add(createRatedDoc("test", "2", IRRELEVANT_RATING_0));
// the following search hits contain only the last three documents
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated.subList(2, 5), "test"), rated);
assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001);
assertEquals((double) 2 / 3, evaluated.metricScore(), 0.00001);
assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

@ -114,14 +114,14 @@ public class PrecisionAtKTests extends ESTestCase {
searchHits[2].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null));

EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);
assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001);
assertEquals((double) 2 / 3, evaluated.metricScore(), 0.00001);
assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());

// also try with setting `ignore_unlabeled`
PrecisionAtK prec = new PrecisionAtK(1, true, 10);
evaluated = prec.evaluate("id", searchHits, rated);
assertEquals((double) 2 / 2, evaluated.getQualityLevel(), 0.00001);
assertEquals((double) 2 / 2, evaluated.metricScore(), 0.00001);
assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

@ -133,14 +133,14 @@ public class PrecisionAtKTests extends ESTestCase {
hits[i].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null));
}
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001);
assertEquals(0.0d, evaluated.metricScore(), 0.00001);
assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());

// also try with setting `ignore_unlabeled`
PrecisionAtK prec = new PrecisionAtK(1, true, 10);
evaluated = prec.evaluate("id", hits, Collections.emptyList());
assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001);
assertEquals(0.0d, evaluated.metricScore(), 0.00001);
assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

@ -148,7 +148,7 @@ public class PrecisionAtKTests extends ESTestCase {
public void testNoResults() throws Exception {
SearchHit[] hits = new SearchHit[0];
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001);
assertEquals(0.0d, evaluated.metricScore(), 0.00001);
assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved());
assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved());
}

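Editorial note on the paired assertions above: with ignore_unlabeled set, the unrated hit drops out of the denominator, which is why the same three hits score 2/3 in strict mode and 2/2 when unlabeled documents are ignored. A minimal sketch of that arithmetic in plain Java (not part of the diff):

    // How ignore_unlabeled changes the precision denominator in the tests above:
    // three hits, two rated relevant, one unrated.
    int relevantRetrieved = 2;
    int retrievedAll = 3;          // strict: the unrated hit counts against precision
    int retrievedLabeledOnly = 2;  // ignore_unlabeled: the unrated hit is skipped
    double strict = (double) relevantRetrieved / retrievedAll;           // 2/3
    double lenient = (double) relevantRetrieved / retrievedLabeledOnly;  // 2/2 = 1.0
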
@ -114,7 +114,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
// the expected Prec@ for the first query is 4/6 and the expected Prec@ for the
// second is 1/6, divided by 2 to get the average
double expectedPrecision = (1.0 / 6.0 + 4.0 / 6.0) / 2.0;
assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE);
assertEquals(expectedPrecision, response.getMetricScore(), Double.MIN_VALUE);
Set<Entry<String, EvalQueryQuality>> entrySet = response.getPartialResults().entrySet();
assertEquals(2, entrySet.size());
for (Entry<String, EvalQueryQuality> entry : entrySet) {

@ -157,7 +157,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
// if we look only at the top 3 documents, the expected P@3 for the first query is
// 2/3 and the expected Prec@ for the second is 1/3, divided by 2 to get the average
expectedPrecision = (1.0 / 3.0 + 2.0 / 3.0) / 2.0;
assertEquals(expectedPrecision, response.getEvaluationResult(), Double.MIN_VALUE);
assertEquals(expectedPrecision, response.getMetricScore(), Double.MIN_VALUE);
}

/**

@ -186,7 +186,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
new RankEvalRequest(task, new String[] { TEST_INDEX }));

RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet();
assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getEvaluationResult(), 10E-14);
assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getMetricScore(), 10E-14);

// test that a different window size k affects the result
metric = new DiscountedCumulativeGain(false, null, 3);

@ -195,7 +195,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { TEST_INDEX }));

response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet();
assertEquals(12.39278926071437, response.getEvaluationResult(), 10E-14);
assertEquals(12.39278926071437, response.getMetricScore(), 10E-14);
}

public void testMRRRequest() {

@ -218,7 +218,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
// the expected reciprocal rank for the berlin_query is 1/1
// dividing by 2 to get the average
double expectedMRR = (1.0 + 1.0 / 5.0) / 2.0;
assertEquals(expectedMRR, response.getEvaluationResult(), 0.0);
assertEquals(expectedMRR, response.getMetricScore(), 0.0);

// test that a different window size k affects the result
metric = new MeanReciprocalRank(1, 3);

@ -231,7 +231,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
// the reciprocal rank for the berlin_query is 1/1
// dividing by 2 to get the average
expectedMRR = 1.0 / 2.0;
assertEquals(expectedMRR, response.getEvaluationResult(), 0.0);
assertEquals(expectedMRR, response.getMetricScore(), 0.0);
}

/**

@ -102,7 +102,7 @@ public class RankEvalResponseTests extends ESTestCase {
try (StreamInput in = output.bytes().streamInput()) {
RankEvalResponse deserializedResponse = new RankEvalResponse();
deserializedResponse.readFrom(in);
assertEquals(randomResponse.getEvaluationResult(), deserializedResponse.getEvaluationResult(), Double.MIN_VALUE);
assertEquals(randomResponse.getMetricScore(), deserializedResponse.getMetricScore(), Double.MIN_VALUE);
assertEquals(randomResponse.getPartialResults(), deserializedResponse.getPartialResults());
assertEquals(randomResponse.getFailures().keySet(), deserializedResponse.getFailures().keySet());
assertNotSame(randomResponse, deserializedResponse);

@ -130,7 +130,7 @@ public class RankEvalResponseTests extends ESTestCase {
assertNotSame(testItem, parsedItem);
// We cannot check equality of object here because some information (e.g.
// SearchHit#shard) cannot fully be parsed back.
assertEquals(testItem.getEvaluationResult(), parsedItem.getEvaluationResult(), 0.0);
assertEquals(testItem.getMetricScore(), parsedItem.getMetricScore(), 0.0);
assertEquals(testItem.getPartialResults().keySet(), parsedItem.getPartialResults().keySet());
for (EvalQueryQuality metricDetail : testItem.getPartialResults().values()) {
EvalQueryQuality parsedEvalQueryQuality = parsedItem.getPartialResults().get(metricDetail.getId());

@ -154,10 +154,10 @@ public class RankEvalResponseTests extends ESTestCase {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString();
assertEquals(("{" +
" \"quality_level\": 0.123," +
" \"metric_score\": 0.123," +
" \"details\": {" +
" \"coffee_query\": {" +
" \"quality_level\": 0.1," +
" \"metric_score\": 0.1," +
" \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
" \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," +
" \"rating\":5}," +

@ -71,8 +71,8 @@ setup:
"metric" : { "precision": { "ignore_unlabeled" : true }}
}

- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
- match: { metric_score: 1}
- match: { details.amsterdam_query.metric_score: 1.0}
- match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}}

@ -84,7 +84,7 @@ setup:
- match: { details.amsterdam_query.hits.2.hit._id: "doc4"}
- is_false: details.amsterdam_query.hits.2.rating

- match: { details.berlin_query.quality_level: 1.0}
- match: { details.berlin_query.metric_score: 1.0}
- match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}
- length: { details.berlin_query.hits: 2}

@ -118,9 +118,9 @@ setup:
"metric" : { "precision": { "ignore_unlabeled" : true }}
}

- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
- match: { details.berlin_query.quality_level: 1.0}
- match: { metric_score: 1}
- match: { details.amsterdam_query.metric_score: 1.0}
- match: { details.berlin_query.metric_score: 1.0}

---
"Mean Reciprocal Rank":

@ -150,14 +150,14 @@ setup:
}

# average is (1/3 + 1/2)/2 = 5/12 ~ 0.41666666666666663
- gt: {quality_level: 0.416}
- lt: {quality_level: 0.417}
- gt: {details.amsterdam_query.quality_level: 0.333}
- lt: {details.amsterdam_query.quality_level: 0.334}
- gt: {metric_score: 0.416}
- lt: {metric_score: 0.417}
- gt: {details.amsterdam_query.metric_score: 0.333}
- lt: {details.amsterdam_query.metric_score: 0.334}
- match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}}
- match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"},
                                                  {"_index": "foo", "_id": "doc3"} ]}
- match: {details.berlin_query.quality_level: 0.5}
- match: {details.berlin_query.metric_score: 0.5}
- match: {details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}}
- match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]}

@ -69,10 +69,10 @@
"metric" : { "dcg": {}}
}

- gt: {quality_level: 13.848263 }
- lt: {quality_level: 13.848264 }
- gt: {details.dcg_query.quality_level: 13.848263}
- lt: {details.dcg_query.quality_level: 13.848264}
- gt: {metric_score: 13.848263 }
- lt: {metric_score: 13.848264 }
- gt: {details.dcg_query.metric_score: 13.848263}
- lt: {details.dcg_query.metric_score: 13.848264}
- match: {details.dcg_query.unrated_docs: [ ]}

# reverse the order in which the results are returned (less relevant docs first)

@ -96,10 +96,10 @@
"metric" : { "dcg": { }}
}

- gt: {quality_level: 10.299674}
- lt: {quality_level: 10.299675}
- gt: {details.dcg_query_reverse.quality_level: 10.299674}
- lt: {details.dcg_query_reverse.quality_level: 10.299675}
- gt: {metric_score: 10.299674}
- lt: {metric_score: 10.299675}
- gt: {details.dcg_query_reverse.metric_score: 10.299674}
- lt: {details.dcg_query_reverse.metric_score: 10.299675}
- match: {details.dcg_query_reverse.unrated_docs: [ ]}

# if we mix both, we should get the average

@ -134,11 +134,11 @@
"metric" : { "dcg": { }}
}

- gt: {quality_level: 12.073969}
- lt: {quality_level: 12.073970}
- gt: {details.dcg_query.quality_level: 13.848263}
- lt: {details.dcg_query.quality_level: 13.848264}
- gt: {metric_score: 12.073969}
- lt: {metric_score: 12.073970}
- gt: {details.dcg_query.metric_score: 13.848263}
- lt: {details.dcg_query.metric_score: 13.848264}
- match: {details.dcg_query.unrated_docs: [ ]}
- gt: {details.dcg_query_reverse.quality_level: 10.299674}
- lt: {details.dcg_query_reverse.quality_level: 10.299675}
- gt: {details.dcg_query_reverse.metric_score: 10.299674}
- lt: {details.dcg_query_reverse.metric_score: 10.299675}
- match: {details.dcg_query_reverse.unrated_docs: [ ]}

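Editorial note: the gt/lt bounds above encode standard exponential DCG, the sum over ranks of (2^rating - 1) / log2(rank + 1). A minimal sketch in plain Java (not part of the diff); the ratings 3, 2, 3, 0, 1, 2 are an assumption about this test's six documents, whose setup lies outside these hunks:

    // Sketch of the DCG formula behind the asserted bounds.
    // Assumed ratings for the six ranked documents: 3, 2, 3, 0, 1, 2.
    int[] ratings = {3, 2, 3, 0, 1, 2};
    double dcg = 0;
    for (int rank = 1; rank <= ratings.length; rank++) {
        dcg += (Math.pow(2, ratings[rank - 1]) - 1) / (Math.log(rank + 1) / Math.log(2));
    }
    // dcg comes out at ~13.8483, inside the gt/lt bounds above; the mixed-query
    // bounds 12.073969..12.073970 are simply the average of 13.848263... and 10.299674...
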
@ -34,8 +34,8 @@
"metric" : { "precision": { "ignore_unlabeled" : true }}
}

- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
- match: { metric_score: 1}
- match: { details.amsterdam_query.metric_score: 1.0}
- match: { details.amsterdam_query.unrated_docs: [ ]}
- match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}

@ -84,7 +84,7 @@ setup:
"metric" : { "precision": { }}
}

- match: {quality_level: 0.9}
- match: {metric_score: 0.9}
- match: {details.amsterdam_query.unrated_docs.0._id: "6"}

---

@ -3,7 +3,7 @@ index:
  filter:
    doublemetaphonefilter:
      type: phonetic
      encoder: doublemetaphone
      encoder: double_metaphone
    metaphonefilter:
      type: phonetic
      encoder: metaphone

@ -12,16 +12,16 @@ index:
      encoder: soundex
    refinedsoundexfilter:
      type: phonetic
      encoder: refinedsoundex
      encoder: refined_soundex
    caverphonefilter:
      type: phonetic
      encoder: caverphone
    beidermorsefilter:
      type: phonetic
      encoder: beidermorse
      encoder: beider_morse
    beidermorsefilterfrench:
      type: phonetic
      encoder: beidermorse
      encoder: beider_morse
      languageset : [ "french" ]
    koelnerphonetikfilter:
      type: phonetic

@ -22,36 +22,6 @@ dependencies {
compile "commons-codec:commons-codec:${versions.commonscodec}"
}

// needed to be consistent with ssl host checking
String host = InetAddress.getLoopbackAddress().getHostAddress();

// location of keystore and files to generate it
File keystore = new File(project.buildDir, 'keystore/test-node.jks')

// generate the keystore
task createKey(type: LoggedExec) {
doFirst {
project.delete(keystore.parentFile)
keystore.parentFile.mkdirs()
}
executable = new File(project.runtimeJavaHome, 'bin/keytool')
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
args '-genkey',
'-alias', 'test-node',
'-keystore', keystore,
'-keyalg', 'RSA',
'-keysize', '2048',
'-validity', '712',
'-dname', 'CN=' + host,
'-keypass', 'keypass',
'-storepass', 'keypass'
}

// add keystore to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystore.parentFile)
processTestResources.dependsOn(createKey)

dependencyLicenses {
mapping from: /google-.*/, to: 'google'
}

@ -1,4 +1,4 @@
1/*
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright

@ -214,25 +214,6 @@ RestIntegTestTask integTestSecureHa = project.tasks.create('integTestSecureHa',
description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode and secured by MIT Kerberos."
}

if (rootProject.ext.compilerJavaVersion.isJava11()) {
// TODO remove when: https://github.com/elastic/elasticsearch/issues/31498
integTestRunner {
systemProperty 'tests.rest.blacklist', [
'hdfs_repository/30_snapshot/take snapshot',
'hdfs_repository/40_restore/Create a snapshot and then restore it',
'hdfs_repository/20_repository_verify/HDFS Repository Verify',
'hdfs_repository/30_snapshot_get/Get a snapshot',
'hdfs_repository/20_repository_create/HDFS Repository Creation',
'hdfs_repository/20_repository_delete/HDFS Delete Repository',
'hdfs_repository/30_snapshot_readonly/Get a snapshot - readonly',
].join(',')
}
}
if (rootProject.ext.runtimeJavaVersion.isJava11() || rootProject.ext.compilerJavaVersion.isJava11()) {
// TODO remove when: https://github.com/elastic/elasticsearch/issues/31498
integTestHa.enabled = false
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {

@ -61,6 +61,7 @@ grant {

// Hadoop depends on OS level user information for simple authentication
// Unix: UnixLoginModule: com.sun.security.auth.module.UnixSystem.UnixSystem init
permission java.lang.RuntimePermission "loadLibrary.jaas";
permission java.lang.RuntimePermission "loadLibrary.jaas_unix";
// Windows: NTLoginModule: com.sun.security.auth.module.NTSystem.loadNative
permission java.lang.RuntimePermission "loadLibrary.jaas_nt";

@ -114,9 +114,7 @@ if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3P

useFixture = true

} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath
|| !s3EC2Bucket || !s3EC2BasePath
|| !s3ECSBucket || !s3ECSBasePath) {
} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) {
throw new IllegalArgumentException("not all options specified to run against external S3 service")
}

@ -349,8 +347,13 @@ processTestResources {

project.afterEvaluate {
if (useFixture == false) {
// 30_repository_temporary_credentials is not ready for CI yet
integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*'
// temporary_credentials, ec2_credentials and ecs_credentials are not ready for third-party-tests yet
integTestRunner.systemProperty 'tests.rest.blacklist',
[
'repository_s3/30_repository_temporary_credentials/*',
'repository_s3/40_repository_ec2_credentials/*',
'repository_s3/50_repository_ecs_credentials/*'
].join(",")
}
}

@ -129,7 +129,7 @@ public abstract class ArchiveTestCase extends PackagingTestCase {
});

Platforms.onLinux(() -> {
final String javaPath = sh.run("which java").stdout.trim();
final String javaPath = sh.run("command -v java").stdout.trim();

try {
sh.run("chmod -x '" + javaPath + "'");

@ -30,16 +30,20 @@ import org.junit.BeforeClass;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.elasticsearch.packaging.util.Cleanup.cleanEverything;
import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist;
import static org.elasticsearch.packaging.util.FileUtils.mv;
import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE;
import static org.elasticsearch.packaging.util.Packages.assertInstalled;
import static org.elasticsearch.packaging.util.Packages.assertRemoved;
import static org.elasticsearch.packaging.util.Packages.install;
import static org.elasticsearch.packaging.util.Packages.remove;
import static org.elasticsearch.packaging.util.Packages.runInstallCommand;
import static org.elasticsearch.packaging.util.Packages.startElasticsearch;
import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
import static org.elasticsearch.packaging.util.Platforms.getOsRelease;

@ -75,6 +79,21 @@ public abstract class PackageTestCase extends PackagingTestCase {
assumeTrue("only compatible distributions", distribution().packaging.compatible);
}

public void test05InstallFailsWhenJavaMissing() {
final Shell sh = new Shell();
final Result java = sh.run("command -v java");

final Path originalJavaPath = Paths.get(java.stdout.trim());
final Path relocatedJavaPath = originalJavaPath.getParent().resolve("java.relocated");
try {
mv(originalJavaPath, relocatedJavaPath);
final Result installResult = runInstallCommand(distribution());
assertThat(installResult.exitCode, is(1));
} finally {
mv(relocatedJavaPath, originalJavaPath);
}
}

public void test10InstallPackage() {
assertRemoved(distribution());
installation = install(distribution());

@ -67,7 +67,10 @@ public class Packages {
Platforms.onDPKG(() -> {
assertThat(status.exitCode, anyOf(is(0), is(1)));
if (status.exitCode == 0) {
assertTrue(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find());
assertTrue("an uninstalled status should be indicated: " + status.stdout,
Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find() ||
Pattern.compile("(?m)^Status:.+ok not-installed").matcher(status.stdout).find()
);
}
});
}

@ -90,13 +93,27 @@ public class Packages {
}

public static Installation install(Distribution distribution, String version) {
final Result result = runInstallCommand(distribution, version);
if (result.exitCode != 0) {
throw new RuntimeException("Installing distribution " + distribution + " version " + version + " failed: " + result);
}

return Installation.ofPackage(distribution.packaging);
}

public static Result runInstallCommand(Distribution distribution) {
return runInstallCommand(distribution, getCurrentVersion());
}

public static Result runInstallCommand(Distribution distribution, String version) {
final Shell sh = new Shell();
final Path distributionFile = getDistributionFile(distribution, version);

Platforms.onRPM(() -> sh.run("rpm -i " + distributionFile));
Platforms.onDPKG(() -> sh.run("dpkg -i " + distributionFile));

return Installation.ofPackage(distribution.packaging);
if (Platforms.isRPM()) {
return sh.runIgnoreExitCode("rpm -i " + distributionFile);
} else {
return sh.runIgnoreExitCode("dpkg -i " + distributionFile);
}
}

public static void remove(Distribution distribution) {

@ -0,0 +1,74 @@
setup:
  - skip:
      version: " - 6.3.99"
      reason: weighted_avg is only available as of 6.4.0
  - do:
      indices.create:
        index: test_1
        body:
          settings:
            number_of_replicas: 0
          mappings:
            doc:
              properties:
                int_field:
                  type : integer
                double_field:
                  type : double
                string_field:
                  type: keyword

  - do:
      bulk:
        refresh: true
        body:
          - index:
              _index: test_1
              _type: doc
              _id: 1
          - int_field: 1
            double_field: 1.0
          - index:
              _index: test_1
              _type: doc
              _id: 2
          - int_field: 2
            double_field: 2.0
          - index:
              _index: test_1
              _type: doc
              _id: 3
          - int_field: 3
            double_field: 3.0
          - index:
              _index: test_1
              _type: doc
              _id: 4
          - int_field: 4
            double_field: 4.0

---
"Basic test":

  - do:
      search:
        body:
          aggs:
            the_int_avg:
              weighted_avg:
                value:
                  field: "int_field"
                weight:
                  field: "int_field"
            the_double_avg:
              weighted_avg:
                value:
                  field: "double_field"
                weight:
                  field: "double_field"

  - match: { hits.total: 4 }
  - length: { hits.hits: 4 }
  - match: { aggregations.the_int_avg.value: 3.0 }
  - match: { aggregations.the_double_avg.value: 3.0 }

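Editorial note: both match assertions encode the same weighted mean. With each document's value also serving as its weight, the expectation is (1·1 + 2·2 + 3·3 + 4·4) / (1 + 2 + 3 + 4) = 30 / 10 = 3.0. A self-check in plain Java (not part of the diff):

    // Self-check of the expected weighted average asserted above:
    // values 1..4, each weighted by itself.
    double weightedSum = 0, weightSum = 0;
    for (int v = 1; v <= 4; v++) {
        weightedSum += v * v; // value * weight
        weightSum += v;       // weight
    }
    double expected = weightedSum / weightSum; // 30 / 10 = 3.0
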
@ -917,6 +917,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
try {
return store.stats();
} catch (IOException e) {
failShard("Failing shard because of exception during storeStats", e);
throw new ElasticsearchException("io exception while building 'store stats'", e);
}
}

@ -181,6 +181,8 @@ import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.weighted_avg.InternalWeightedAvg;
import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;

@ -335,6 +337,8 @@ public class SearchModule {
private void registerAggregations(List<SearchPlugin> plugins) {
registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder::parse)
.addResultReader(InternalAvg::new));
registerAggregation(new AggregationSpec(WeightedAvgAggregationBuilder.NAME, WeightedAvgAggregationBuilder::new,
WeightedAvgAggregationBuilder::parse).addResultReader(InternalWeightedAvg::new));
registerAggregation(new AggregationSpec(SumAggregationBuilder.NAME, SumAggregationBuilder::new, SumAggregationBuilder::parse)
.addResultReader(InternalSum::new));
registerAggregation(new AggregationSpec(MinAggregationBuilder.NAME, MinAggregationBuilder::new, MinAggregationBuilder::parse)

@ -82,6 +82,7 @@ import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder;

import java.util.Map;

@ -107,7 +108,13 @@ public class AggregationBuilders {
return new AvgAggregationBuilder(name);
}

/**
 * Create a new {@link WeightedAvg} aggregation with the given name.
 */
public static WeightedAvgAggregationBuilder weightedAvg(String name) {
return new WeightedAvgAggregationBuilder(name);
}

/**
 * Create a new {@link Max} aggregation with the given name.
 */

@ -0,0 +1,144 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics.weighted_avg;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class InternalWeightedAvg extends InternalNumericMetricsAggregation.SingleValue implements WeightedAvg {
    private final double sum;
    private final double weight;

    public InternalWeightedAvg(String name, double sum, double weight, DocValueFormat format, List<PipelineAggregator> pipelineAggregators,
                               Map<String, Object> metaData) {
        super(name, pipelineAggregators, metaData);
        this.sum = sum;
        this.weight = weight;
        this.format = format;
    }

    /**
     * Read from a stream.
     */
    public InternalWeightedAvg(StreamInput in) throws IOException {
        super(in);
        format = in.readNamedWriteable(DocValueFormat.class);
        sum = in.readDouble();
        weight = in.readDouble();
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeNamedWriteable(format);
        out.writeDouble(sum);
        out.writeDouble(weight);
    }

    @Override
    public double value() {
        return getValue();
    }

    @Override
    public double getValue() {
        return sum / weight;
    }

    double getSum() {
        return sum;
    }

    double getWeight() {
        return weight;
    }

    DocValueFormat getFormatter() {
        return format;
    }

    @Override
    public String getWriteableName() {
        return WeightedAvgAggregationBuilder.NAME;
    }

    @Override
    public InternalWeightedAvg doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
        double weight = 0;
        double sum = 0;
        double sumCompensation = 0;
        double weightCompensation = 0;
        // Compute the sum of double values with Kahan summation algorithm which is more
        // accurate than naive summation.
        for (InternalAggregation aggregation : aggregations) {
            InternalWeightedAvg avg = (InternalWeightedAvg) aggregation;
            // If the weight is Inf or NaN, just add it to the running tally to "convert" to
            // Inf/NaN. This keeps the behavior bwc from before kahan summing
            if (Double.isFinite(avg.weight) == false) {
                weight += avg.weight;
            } else if (Double.isFinite(weight)) {
                double corrected = avg.weight - weightCompensation;
                double newWeight = weight + corrected;
                weightCompensation = (newWeight - weight) - corrected;
                weight = newWeight;
            }
            // If the avg is Inf or NaN, just add it to the running tally to "convert" to
            // Inf/NaN. This keeps the behavior bwc from before kahan summing
            if (Double.isFinite(avg.sum) == false) {
                sum += avg.sum;
            } else if (Double.isFinite(sum)) {
                double corrected = avg.sum - sumCompensation;
                double newSum = sum + corrected;
                sumCompensation = (newSum - sum) - corrected;
                sum = newSum;
            }
        }
        return new InternalWeightedAvg(getName(), sum, weight, format, pipelineAggregators(), getMetaData());
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(CommonFields.VALUE.getPreferredName(), weight != 0 ? getValue() : null);
        if (weight != 0 && format != DocValueFormat.RAW) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(getValue()));
        }
        return builder;
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(sum, weight, format.getWriteableName());
    }

    @Override
    protected boolean doEquals(Object obj) {
        InternalWeightedAvg other = (InternalWeightedAvg) obj;
        return Objects.equals(sum, other.sum) &&
            Objects.equals(weight, other.weight) &&
            Objects.equals(format.getWriteableName(), other.format.getWriteableName());
    }
}

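Editorial note on doReduce above: it merges partial sums and partial weights, not partial averages, because averaging per-shard averages is wrong whenever shards hold different total weights. A hypothetical two-shard example in plain Java (not part of the diff):

    // Why the reduce keeps sum and weight separate. Hypothetical shard totals:
    double sum1 = 30, weight1 = 10;   // shard 1: local weighted mean 3.0
    double sum2 = 8,  weight2 = 2;    // shard 2: local weighted mean 4.0
    double correct = (sum1 + sum2) / (weight1 + weight2);   // 38 / 12 = 3.1666...
    double naive = (sum1 / weight1 + sum2 / weight2) / 2.0; // 3.5, a different answer
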
@ -0,0 +1,65 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.weighted_avg;

import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;

import java.io.IOException;

public class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg {

    @Override
    public double getValue() {
        return value();
    }

    @Override
    public String getType() {
        return WeightedAvgAggregationBuilder.NAME;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        // InternalWeightedAvg renders value only if the avg normalizer (count) is not 0.
        // We parse back `null` as Double.POSITIVE_INFINITY so we check for that value here to get the same xContent output
        boolean hasValue = value != Double.POSITIVE_INFINITY;
        builder.field(CommonFields.VALUE.getPreferredName(), hasValue ? value : null);
        if (hasValue && valueAsString != null) {
            builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), valueAsString);
        }
        return builder;
    }

    private static final ObjectParser<ParsedWeightedAvg, Void> PARSER
        = new ObjectParser<>(ParsedWeightedAvg.class.getSimpleName(), true, ParsedWeightedAvg::new);

    static {
        declareSingleValueFields(PARSER, Double.POSITIVE_INFINITY);
    }

    public static ParsedWeightedAvg fromXContent(XContentParser parser, final String name) {
        ParsedWeightedAvg avg = PARSER.apply(parser, null);
        avg.setName(name);
        return avg;
    }
}

@ -0,0 +1,32 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics.weighted_avg;

import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;

/**
 * An aggregation that computes the weighted average of the values in the current bucket.
 */
public interface WeightedAvg extends NumericMetricsAggregation.SingleValue {

    /**
     * The average value.
     */
    double getValue();
}

@ -0,0 +1,128 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.weighted_avg;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceParseHelper;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

public class WeightedAvgAggregationBuilder extends MultiValuesSourceAggregationBuilder.LeafOnly<Numeric, WeightedAvgAggregationBuilder> {
    public static final String NAME = "weighted_avg";
    public static final ParseField VALUE_FIELD = new ParseField("value");
    public static final ParseField WEIGHT_FIELD = new ParseField("weight");

    private static final ObjectParser<WeightedAvgAggregationBuilder, Void> PARSER;
    static {
        PARSER = new ObjectParser<>(WeightedAvgAggregationBuilder.NAME);
        MultiValuesSourceParseHelper.declareCommon(PARSER, true, ValueType.NUMERIC);
        MultiValuesSourceParseHelper.declareField(VALUE_FIELD.getPreferredName(), PARSER, true, false);
        MultiValuesSourceParseHelper.declareField(WEIGHT_FIELD.getPreferredName(), PARSER, true, false);
    }

    public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
        return PARSER.parse(parser, new WeightedAvgAggregationBuilder(aggregationName), null);
    }

    public WeightedAvgAggregationBuilder(String name) {
        super(name, ValueType.NUMERIC);
    }

    public WeightedAvgAggregationBuilder(WeightedAvgAggregationBuilder clone, Builder factoriesBuilder, Map<String, Object> metaData) {
        super(clone, factoriesBuilder, metaData);
    }

    public WeightedAvgAggregationBuilder value(MultiValuesSourceFieldConfig valueConfig) {
        valueConfig = Objects.requireNonNull(valueConfig, "Configuration for field [" + VALUE_FIELD + "] cannot be null");
        field(VALUE_FIELD.getPreferredName(), valueConfig);
        return this;
    }

    public WeightedAvgAggregationBuilder weight(MultiValuesSourceFieldConfig weightConfig) {
        weightConfig = Objects.requireNonNull(weightConfig, "Configuration for field [" + WEIGHT_FIELD + "] cannot be null");
        field(WEIGHT_FIELD.getPreferredName(), weightConfig);
        return this;
    }

    /**
     * Read from a stream.
     */
    public WeightedAvgAggregationBuilder(StreamInput in) throws IOException {
        super(in, ValueType.NUMERIC);
    }

    @Override
    protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
        return new WeightedAvgAggregationBuilder(this, factoriesBuilder, metaData);
    }

    @Override
    protected void innerWriteTo(StreamOutput out) {
        // Do nothing, no extra state to write to stream
    }

    @Override
    protected MultiValuesSourceAggregatorFactory<Numeric, ?> innerBuild(SearchContext context,
                                                                        Map<String, ValuesSourceConfig<Numeric>> configs,
                                                                        DocValueFormat format,
                                                                        AggregatorFactory<?> parent,
                                                                        Builder subFactoriesBuilder) throws IOException {
        return new WeightedAvgAggregatorFactory(name, configs, format, context, parent, subFactoriesBuilder, metaData);
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, ToXContent.Params params) throws IOException {
        return builder;
    }

    @Override
    protected int innerHashCode() {
        return 0;
    }

    @Override
    protected boolean innerEquals(Object obj) {
        return true;
    }

    @Override
    public String getType() {
        return NAME;
    }
}

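Editorial note: a minimal usage sketch for the builder above, combining it with the AggregationBuilders.weightedAvg factory added earlier in this diff. The field names are hypothetical, and the MultiValuesSourceFieldConfig.Builder/setFieldName API is an assumption not shown in these hunks:

    // Hypothetical client-side construction of a weighted_avg aggregation.
    // Assumes MultiValuesSourceFieldConfig exposes a Builder with setFieldName(String).
    MultiValuesSourceFieldConfig valueConfig =
        new MultiValuesSourceFieldConfig.Builder().setFieldName("grade").build();
    MultiValuesSourceFieldConfig weightConfig =
        new MultiValuesSourceFieldConfig.Builder().setFieldName("weight").build();
    WeightedAvgAggregationBuilder weightedAvg =
        AggregationBuilders.weightedAvg("weighted_grade")
            .value(valueConfig)    // numeric field supplying the values
            .weight(weightConfig); // numeric field supplying the per-document weight
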
@ -0,0 +1,158 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics.weighted_avg;

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.MultiValuesSource;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder.VALUE_FIELD;
import static org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder.WEIGHT_FIELD;

public class WeightedAvgAggregator extends NumericMetricsAggregator.SingleValue {

    private final MultiValuesSource.NumericMultiValuesSource valuesSources;

    private DoubleArray weights;
    private DoubleArray sums;
    private DoubleArray sumCompensations;
    private DoubleArray weightCompensations;
    private DocValueFormat format;

    public WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, DocValueFormat format,
                                 SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
                                 Map<String, Object> metaData) throws IOException {
        super(name, context, parent, pipelineAggregators, metaData);
        this.valuesSources = valuesSources;
        this.format = format;
        if (valuesSources != null) {
            final BigArrays bigArrays = context.bigArrays();
            weights = bigArrays.newDoubleArray(1, true);
            sums = bigArrays.newDoubleArray(1, true);
            sumCompensations = bigArrays.newDoubleArray(1, true);
            weightCompensations = bigArrays.newDoubleArray(1, true);
        }
    }

    @Override
    public boolean needsScores() {
        return valuesSources != null && valuesSources.needsScores();
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
                                                final LeafBucketCollector sub) throws IOException {
        if (valuesSources == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedNumericDoubleValues docValues = valuesSources.getField(VALUE_FIELD.getPreferredName(), ctx);
        final SortedNumericDoubleValues docWeights = valuesSources.getField(WEIGHT_FIELD.getPreferredName(), ctx);

        return new LeafBucketCollectorBase(sub, docValues) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                weights = bigArrays.grow(weights, bucket + 1);
                sums = bigArrays.grow(sums, bucket + 1);
                sumCompensations = bigArrays.grow(sumCompensations, bucket + 1);
                weightCompensations = bigArrays.grow(weightCompensations, bucket + 1);

                if (docValues.advanceExact(doc) && docWeights.advanceExact(doc)) {
                    if (docWeights.docValueCount() > 1) {
                        throw new AggregationExecutionException("Encountered more than one weight for a " +
                            "single document. Use a script to combine multiple weights-per-doc into a single value.");
                    }
                    // There should always be one weight if advanceExact lands us here, either
                    // a real weight or a `missing` weight
                    assert docWeights.docValueCount() == 1;
                    final double weight = docWeights.nextValue();

                    final int numValues = docValues.docValueCount();
                    assert numValues > 0;

                    for (int i = 0; i < numValues; i++) {
                        kahanSum(docValues.nextValue() * weight, sums, sumCompensations, bucket);
                        kahanSum(weight, weights, weightCompensations, bucket);
                    }
                }
            }
        };
    }

    private static void kahanSum(double value, DoubleArray values, DoubleArray compensations, long bucket) {
        // Compute the sum of double values with Kahan summation algorithm which is more
        // accurate than naive summation.
        double sum = values.get(bucket);
        double compensation = compensations.get(bucket);

        if (Double.isFinite(value) == false) {
            sum += value;
        } else if (Double.isFinite(sum)) {
            double corrected = value - compensation;
            double newSum = sum + corrected;
            compensation = (newSum - sum) - corrected;
            sum = newSum;
        }
        values.set(bucket, sum);
        compensations.set(bucket, compensation);
    }

    @Override
    public double metric(long owningBucketOrd) {
        if (valuesSources == null || owningBucketOrd >= sums.size()) {
            return Double.NaN;
        }
        return sums.get(owningBucketOrd) / weights.get(owningBucketOrd);
    }

    @Override
    public InternalAggregation buildAggregation(long bucket) {
        if (valuesSources == null || bucket >= sums.size()) {
            return buildEmptyAggregation();
        }
        return new InternalWeightedAvg(name, sums.get(bucket), weights.get(bucket), format, pipelineAggregators(), metaData());
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        return new InternalWeightedAvg(name, 0.0, 0L, format, pipelineAggregators(), metaData());
    }

    @Override
    public void doClose() {
        Releasables.close(weights, sums, sumCompensations, weightCompensations);
    }

}

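Editorial note: kahanSum above is compensated (Kahan) summation, where the compensation slot stores the low-order bits lost by each floating-point addition. A standalone sketch of the technique in plain Java (not part of the diff):

    // Compensated summation: recover the rounding error of each addition.
    double naive = 0;
    double sum = 0, compensation = 0;
    for (int i = 0; i < 10_000_000; i++) {
        naive += 0.1;
        double corrected = 0.1 - compensation;
        double newSum = sum + corrected;
        compensation = (newSum - sum) - corrected; // the bits the addition dropped
        sum = newSum;
    }
    // naive drifts visibly from 1_000_000.0 after ten million additions,
    // while the compensated total stays within one ulp of the exact sum.
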
@ -0,0 +1,64 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.weighted_avg;

import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.MultiValuesSource;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.List;
import java.util.Map;

public class WeightedAvgAggregatorFactory extends MultiValuesSourceAggregatorFactory<Numeric, WeightedAvgAggregatorFactory> {

    public WeightedAvgAggregatorFactory(String name, Map<String, ValuesSourceConfig<Numeric>> configs,
                                        DocValueFormat format, SearchContext context, AggregatorFactory<?> parent,
                                        AggregatorFactories.Builder subFactoriesBuilder,
                                        Map<String, Object> metaData) throws IOException {
        super(name, configs, format, context, parent, subFactoriesBuilder, metaData);
    }

    @Override
    protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
        throws IOException {
        return new WeightedAvgAggregator(name, null, format, context, parent, pipelineAggregators, metaData);
    }

    @Override
    protected Aggregator doCreateInternal(Map<String, ValuesSourceConfig<Numeric>> configs, DocValueFormat format,
                                          Aggregator parent, boolean collectsFromSingleBucket,
                                          List<PipelineAggregator> pipelineAggregators,
                                          Map<String, Object> metaData) throws IOException {
        MultiValuesSource.NumericMultiValuesSource numericMultiVS
            = new MultiValuesSource.NumericMultiValuesSource(configs, context.getQueryShardContext());
        if (numericMultiVS.areValuesSourcesEmpty()) {
            return createUnmapped(parent, pipelineAggregators, metaData);
        }
        return new WeightedAvgAggregator(name, numericMultiVS, format, context, parent, pipelineAggregators, metaData);
    }
}

@ -0,0 +1,93 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.support;

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Class to encapsulate a set of ValuesSource objects labeled by field name
 */
public abstract class MultiValuesSource<VS extends ValuesSource> {
    protected Map<String, VS> values;

    public static class NumericMultiValuesSource extends MultiValuesSource<ValuesSource.Numeric> {
        public NumericMultiValuesSource(Map<String, ValuesSourceConfig<ValuesSource.Numeric>> valuesSourceConfigs,
                                        QueryShardContext context) throws IOException {
            values = new HashMap<>(valuesSourceConfigs.size());
            for (Map.Entry<String, ValuesSourceConfig<ValuesSource.Numeric>> entry : valuesSourceConfigs.entrySet()) {
                values.put(entry.getKey(), entry.getValue().toValuesSource(context));
            }
        }

        public SortedNumericDoubleValues getField(String fieldName, LeafReaderContext ctx) throws IOException {
            ValuesSource.Numeric value = values.get(fieldName);
            if (value == null) {
                throw new IllegalArgumentException("Could not find field name [" + fieldName + "] in multiValuesSource");
            }
            return value.doubleValues(ctx);
        }
    }

    public static class BytesMultiValuesSource extends MultiValuesSource<ValuesSource.Bytes> {
        public BytesMultiValuesSource(Map<String, ValuesSourceConfig<ValuesSource.Bytes>> valuesSourceConfigs,
                                      QueryShardContext context) throws IOException {
            values = new HashMap<>(valuesSourceConfigs.size());
            for (Map.Entry<String, ValuesSourceConfig<ValuesSource.Bytes>> entry : valuesSourceConfigs.entrySet()) {
                values.put(entry.getKey(), entry.getValue().toValuesSource(context));
            }
        }

        public Object getField(String fieldName, LeafReaderContext ctx) throws IOException {
            ValuesSource.Bytes value = values.get(fieldName);
            if (value == null) {
                throw new IllegalArgumentException("Could not find field name [" + fieldName + "] in multiValuesSource");
            }
            return value.bytesValues(ctx);
        }
    }

    public static class GeoPointValuesSource extends MultiValuesSource<ValuesSource.GeoPoint> {
        public GeoPointValuesSource(Map<String, ValuesSourceConfig<ValuesSource.GeoPoint>> valuesSourceConfigs,
                                    QueryShardContext context) throws IOException {
            values = new HashMap<>(valuesSourceConfigs.size());
            for (Map.Entry<String, ValuesSourceConfig<ValuesSource.GeoPoint>> entry : valuesSourceConfigs.entrySet()) {
                values.put(entry.getKey(), entry.getValue().toValuesSource(context));
            }
        }
    }

    public boolean needsScores() {
        return values.values().stream().anyMatch(ValuesSource::needsScores);
    }

    public String[] fieldNames() {
        return values.keySet().toArray(new String[0]);
    }

    public boolean areValuesSourcesEmpty() {
        return values.values().stream().allMatch(Objects::isNull);
    }
}
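A minimal sketch of how an aggregator consumes the class above per segment, assuming a values source was registered under the hypothetical label "value" (the surrounding collector and variable names are illustrative, not part of this change):

    // inside a leaf collector; numericMultiVS and the LeafReaderContext ctx come from the enclosing aggregator
    SortedNumericDoubleValues values = numericMultiVS.getField("value", ctx);
    if (values.advanceExact(doc)) {
        double sum = 0;
        for (int i = 0; i < values.docValueCount(); i++) {
            sum += values.nextValue(); // iterate this document's values in order
        }
    }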
@ -0,0 +1,268 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Similar to {@link ValuesSourceAggregationBuilder}, except it references multiple ValuesSources (e.g. so that an aggregation
 * can pull values from multiple fields).
 *
 * A limitation of this class is that all the ValuesSources being referenced must be of the same type.
 */
public abstract class MultiValuesSourceAggregationBuilder<VS extends ValuesSource, AB extends MultiValuesSourceAggregationBuilder<VS, AB>>
        extends AbstractAggregationBuilder<AB> {

    public abstract static class LeafOnly<VS extends ValuesSource, AB extends MultiValuesSourceAggregationBuilder<VS, AB>>
            extends MultiValuesSourceAggregationBuilder<VS, AB> {

        protected LeafOnly(String name, ValueType targetValueType) {
            super(name, targetValueType);
        }

        protected LeafOnly(LeafOnly<VS, AB> clone, Builder factoriesBuilder, Map<String, Object> metaData) {
            super(clone, factoriesBuilder, metaData);
            if (factoriesBuilder.count() > 0) {
                throw new AggregationInitializationException("Aggregator [" + name + "] of type ["
                    + getType() + "] cannot accept sub-aggregations");
            }
        }

        /**
         * Read from a stream that does not serialize its targetValueType. This should be used by most subclasses.
         */
        protected LeafOnly(StreamInput in, ValueType targetValueType) throws IOException {
            super(in, targetValueType);
        }

        @Override
        public AB subAggregations(Builder subFactories) {
            throw new AggregationInitializationException("Aggregator [" + name + "] of type [" +
                getType() + "] cannot accept sub-aggregations");
        }
    }

    private Map<String, MultiValuesSourceFieldConfig> fields = new HashMap<>();
    private final ValueType targetValueType;
    private ValueType valueType = null;
    private String format = null;

    protected MultiValuesSourceAggregationBuilder(String name, ValueType targetValueType) {
        super(name);
        this.targetValueType = targetValueType;
    }

    protected MultiValuesSourceAggregationBuilder(MultiValuesSourceAggregationBuilder<VS, AB> clone,
                                                  Builder factoriesBuilder, Map<String, Object> metaData) {
        super(clone, factoriesBuilder, metaData);

        this.fields = new HashMap<>(clone.fields);
        this.targetValueType = clone.targetValueType;
        this.valueType = clone.valueType;
        this.format = clone.format;
    }

    protected MultiValuesSourceAggregationBuilder(StreamInput in, ValueType targetValueType)
            throws IOException {
        super(in);
        assert false == serializeTargetValueType() : "Wrong read constructor called for subclass that provides its targetValueType";
        this.targetValueType = targetValueType;
        read(in);
    }

    /**
     * Read from a stream.
     */
    @SuppressWarnings("unchecked")
    private void read(StreamInput in) throws IOException {
        fields = in.readMap(StreamInput::readString, MultiValuesSourceFieldConfig::new);
        valueType = in.readOptionalWriteable(ValueType::readFromStream);
        format = in.readOptionalString();
    }

    @Override
    protected final void doWriteTo(StreamOutput out) throws IOException {
        if (serializeTargetValueType()) {
            out.writeOptionalWriteable(targetValueType);
        }
        out.writeMap(fields, StreamOutput::writeString, (o, value) -> value.writeTo(o));
        out.writeOptionalWriteable(valueType);
        out.writeOptionalString(format);
        innerWriteTo(out);
    }

    /**
     * Write subclass' state to the stream
     */
    protected abstract void innerWriteTo(StreamOutput out) throws IOException;

    @SuppressWarnings("unchecked")
    protected AB field(String propertyName, MultiValuesSourceFieldConfig config) {
        if (config == null) {
            throw new IllegalArgumentException("[config] must not be null: [" + name + "]");
        }
        this.fields.put(propertyName, config);
        return (AB) this;
    }

    public Map<String, MultiValuesSourceFieldConfig> fields() {
        return fields;
    }

    /**
     * Sets the {@link ValueType} for the value produced by this aggregation
     */
    @SuppressWarnings("unchecked")
    public AB valueType(ValueType valueType) {
        if (valueType == null) {
            throw new IllegalArgumentException("[valueType] must not be null: [" + name + "]");
        }
        this.valueType = valueType;
        return (AB) this;
    }

    /**
     * Gets the {@link ValueType} for the value produced by this aggregation
     */
    public ValueType valueType() {
        return valueType;
    }

    /**
     * Sets the format to use for the output of the aggregation.
     */
    @SuppressWarnings("unchecked")
    public AB format(String format) {
        if (format == null) {
            throw new IllegalArgumentException("[format] must not be null: [" + name + "]");
        }
        this.format = format;
        return (AB) this;
    }

    /**
     * Gets the format to use for the output of the aggregation.
     */
    public String format() {
        return format;
    }

    @Override
    protected final MultiValuesSourceAggregatorFactory<VS, ?> doBuild(SearchContext context, AggregatorFactory<?> parent,
            AggregatorFactories.Builder subFactoriesBuilder) throws IOException {
        ValueType finalValueType = this.valueType != null ? this.valueType : targetValueType;

        Map<String, ValuesSourceConfig<VS>> configs = new HashMap<>(fields.size());
        fields.forEach((key, value) -> {
            ValuesSourceConfig<VS> config = ValuesSourceConfig.resolve(context.getQueryShardContext(), finalValueType,
                value.getFieldName(), value.getScript(), value.getMissing(), value.getTimeZone(), format);
            configs.put(key, config);
        });
        DocValueFormat docValueFormat = resolveFormat(format, finalValueType);
        return innerBuild(context, configs, docValueFormat, parent, subFactoriesBuilder);
    }

    private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) {
        if (valueType == null) {
            return DocValueFormat.RAW; // we can't figure it out
        }
        DocValueFormat valueFormat = valueType.defaultFormat;
        if (valueFormat instanceof DocValueFormat.Decimal && format != null) {
            valueFormat = new DocValueFormat.Decimal(format);
        }
        return valueFormat;
    }

    protected abstract MultiValuesSourceAggregatorFactory<VS, ?> innerBuild(SearchContext context,
        Map<String, ValuesSourceConfig<VS>> configs, DocValueFormat format, AggregatorFactory<?> parent,
        AggregatorFactories.Builder subFactoriesBuilder) throws IOException;

    /**
     * Should this builder serialize its targetValueType? Defaults to false. All subclasses that override this to true
     * should use the three argument read constructor rather than the four argument version.
     */
    protected boolean serializeTargetValueType() {
        return false;
    }

    @Override
    public final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (fields != null) {
            builder.field(CommonFields.FIELDS.getPreferredName(), fields);
        }
        if (format != null) {
            builder.field(CommonFields.FORMAT.getPreferredName(), format);
        }
        if (valueType != null) {
            builder.field(CommonFields.VALUE_TYPE.getPreferredName(), valueType.getPreferredName());
        }
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }

    protected abstract XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException;

    @Override
    protected final int doHashCode() {
        return Objects.hash(fields, format, targetValueType, valueType, innerHashCode());
    }

    protected abstract int innerHashCode();

    @Override
    protected final boolean doEquals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        MultiValuesSourceAggregationBuilder that = (MultiValuesSourceAggregationBuilder) other;

        return Objects.equals(this.fields, that.fields)
            && Objects.equals(this.format, that.format)
            && Objects.equals(this.valueType, that.valueType);
    }

    protected abstract boolean innerEquals(Object obj);
}
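Mirroring the new WeightedAvgAggregatorTests later in this commit, a concrete subclass exposes one MultiValuesSourceFieldConfig per role; the field names here are illustrative:

    MultiValuesSourceFieldConfig value = new MultiValuesSourceFieldConfig.Builder().setFieldName("grade").build();
    MultiValuesSourceFieldConfig weight = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight").build();
    WeightedAvgAggregationBuilder agg = new WeightedAvgAggregationBuilder("weighted_grade")
        .value(value)   // stores the config under its role via field(String, MultiValuesSourceFieldConfig)
        .weight(weight);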
@ -0,0 +1,64 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.List;
import java.util.Map;

public abstract class MultiValuesSourceAggregatorFactory<VS extends ValuesSource, AF extends MultiValuesSourceAggregatorFactory<VS, AF>>
        extends AggregatorFactory<AF> {

    protected final Map<String, ValuesSourceConfig<VS>> configs;
    protected final DocValueFormat format;

    public MultiValuesSourceAggregatorFactory(String name, Map<String, ValuesSourceConfig<VS>> configs,
                                              DocValueFormat format, SearchContext context,
                                              AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
                                              Map<String, Object> metaData) throws IOException {
        super(name, context, parent, subFactoriesBuilder, metaData);
        this.configs = configs;
        this.format = format;
    }

    @Override
    public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators,
                                     Map<String, Object> metaData) throws IOException {
        return doCreateInternal(configs, format, parent, collectsFromSingleBucket,
            pipelineAggregators, metaData);
    }

    protected abstract Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators,
                                                 Map<String, Object> metaData) throws IOException;

    protected abstract Aggregator doCreateInternal(Map<String, ValuesSourceConfig<VS>> configs,
                                                   DocValueFormat format, Aggregator parent, boolean collectsFromSingleBucket,
                                                   List<PipelineAggregator> pipelineAggregators,
                                                   Map<String, Object> metaData) throws IOException;

}
@ -0,0 +1,186 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.function.BiFunction;

public class MultiValuesSourceFieldConfig implements Writeable, ToXContentFragment {
    private String fieldName;
    private Object missing;
    private Script script;
    private DateTimeZone timeZone;

    private static final String NAME = "field_config";

    public static final BiFunction<Boolean, Boolean, ObjectParser<MultiValuesSourceFieldConfig.Builder, Void>> PARSER
        = (scriptable, timezoneAware) -> {

        ObjectParser<MultiValuesSourceFieldConfig.Builder, Void> parser
            = new ObjectParser<>(MultiValuesSourceFieldConfig.NAME, MultiValuesSourceFieldConfig.Builder::new);

        parser.declareString(MultiValuesSourceFieldConfig.Builder::setFieldName, ParseField.CommonFields.FIELD);
        parser.declareField(MultiValuesSourceFieldConfig.Builder::setMissing, XContentParser::objectText,
            ParseField.CommonFields.MISSING, ObjectParser.ValueType.VALUE);

        if (scriptable) {
            parser.declareField(MultiValuesSourceFieldConfig.Builder::setScript,
                (p, context) -> Script.parse(p),
                Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
        }

        if (timezoneAware) {
            parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> {
                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                    return DateTimeZone.forID(p.text());
                } else {
                    return DateTimeZone.forOffsetHours(p.intValue());
                }
            }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
        }
        return parser;
    };

    private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, DateTimeZone timeZone) {
        this.fieldName = fieldName;
        this.missing = missing;
        this.script = script;
        this.timeZone = timeZone;
    }

    public MultiValuesSourceFieldConfig(StreamInput in) throws IOException {
        this.fieldName = in.readString();
        this.missing = in.readGenericValue();
        this.script = in.readOptionalWriteable(Script::new);
        this.timeZone = in.readOptionalTimeZone();
    }

    public Object getMissing() {
        return missing;
    }

    public Script getScript() {
        return script;
    }

    public DateTimeZone getTimeZone() {
        return timeZone;
    }

    public String getFieldName() {
        return fieldName;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(fieldName);
        out.writeGenericValue(missing);
        out.writeOptionalWriteable(script);
        out.writeOptionalTimeZone(timeZone);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        if (missing != null) {
            builder.field(ParseField.CommonFields.MISSING.getPreferredName(), missing);
        }
        if (script != null) {
            builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script);
        }
        if (fieldName != null) {
            builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName);
        }
        if (timeZone != null) {
            builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone);
        }
        return builder;
    }

    public static class Builder {
        private String fieldName;
        private Object missing = null;
        private Script script = null;
        private DateTimeZone timeZone = null;

        public String getFieldName() {
            return fieldName;
        }

        public Builder setFieldName(String fieldName) {
            this.fieldName = fieldName;
            return this;
        }

        public Object getMissing() {
            return missing;
        }

        public Builder setMissing(Object missing) {
            this.missing = missing;
            return this;
        }

        public Script getScript() {
            return script;
        }

        public Builder setScript(Script script) {
            this.script = script;
            return this;
        }

        public DateTimeZone getTimeZone() {
            return timeZone;
        }

        public Builder setTimeZone(DateTimeZone timeZone) {
            this.timeZone = timeZone;
            return this;
        }

        public MultiValuesSourceFieldConfig build() {
            if (Strings.isNullOrEmpty(fieldName) && script == null) {
                throw new IllegalArgumentException("[" + ParseField.CommonFields.FIELD.getPreferredName()
                    + "] and [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] cannot both be null. " +
                    "Please specify one or the other.");
            }

            if (Strings.isNullOrEmpty(fieldName) == false && script != null) {
                throw new IllegalArgumentException("[" + ParseField.CommonFields.FIELD.getPreferredName()
                    + "] and [" + Script.SCRIPT_PARSE_FIELD.getPreferredName() + "] cannot both be configured. " +
                    "Please specify one or the other.");
            }

            return new MultiValuesSourceFieldConfig(fieldName, missing, script, timeZone);
        }
    }
}
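A short sketch of the Builder contract above: exactly one of field and script must be set, while missing and timeZone are optional (the field name and missing value are illustrative):

    MultiValuesSourceFieldConfig config = new MultiValuesSourceFieldConfig.Builder()
        .setFieldName("price")
        .setMissing(0)  // substituted for documents that lack the field
        .build();       // ok: field is set, script is not

    // Setting neither field nor script, or setting both, makes build() throw IllegalArgumentException
    // ("... cannot both be null ..." / "... cannot both be configured ...").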
@ -0,0 +1,59 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.AbstractObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

public final class MultiValuesSourceParseHelper {

    public static <VS extends ValuesSource, T> void declareCommon(
        AbstractObjectParser<? extends MultiValuesSourceAggregationBuilder<VS, ?>, T> objectParser, boolean formattable,
        ValueType targetValueType) {

        objectParser.declareField(MultiValuesSourceAggregationBuilder::valueType, p -> {
            ValueType valueType = ValueType.resolveForScript(p.text());
            if (targetValueType != null && valueType.isNotA(targetValueType)) {
                throw new ParsingException(p.getTokenLocation(),
                    "Aggregation [" + objectParser.getName() + "] was configured with an incompatible value type ["
                        + valueType + "]. It can only work on value of type ["
                        + targetValueType + "]");
            }
            return valueType;
        }, ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING);

        if (formattable) {
            objectParser.declareField(MultiValuesSourceAggregationBuilder::format, XContentParser::text,
                ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING);
        }
    }

    public static <VS extends ValuesSource, T> void declareField(String fieldName,
        AbstractObjectParser<? extends MultiValuesSourceAggregationBuilder<VS, ?>, T> objectParser,
        boolean scriptable, boolean timezoneAware) {

        objectParser.declareField((o, fieldConfig) -> o.field(fieldName, fieldConfig.build()),
            (p, c) -> MultiValuesSourceFieldConfig.PARSER.apply(scriptable, timezoneAware).parse(p, null),
            new ParseField(fieldName), ObjectParser.ValueType.OBJECT);
    }
}
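A hedged sketch of how a concrete multi-source builder would wire these helpers into its ObjectParser; the parser name, builder class, and field labels are assumptions for illustration only:

    ObjectParser<MyMultiSourceAggBuilder, Void> parser =
        new ObjectParser<>("my_agg", () -> new MyMultiSourceAggBuilder("_name"));
    MultiValuesSourceParseHelper.declareCommon(parser, true, ValueType.NUMERIC); // value_type + format
    MultiValuesSourceParseHelper.declareField("value", parser, true, false);     // scriptable, not timezone-aware
    MultiValuesSourceParseHelper.declareField("weight", parser, true, false);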
@ -19,6 +19,7 @@

package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

@ -95,6 +96,8 @@ public enum ValueType implements Writeable {
    private final byte id;
    private String preferredName;

    public static final ParseField VALUE_TYPE = new ParseField("value_type", "valueType");

    ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType,
              Class<? extends IndexFieldData> fieldDataType, DocValueFormat defaultFormat) {
        this.id = id;
@ -28,7 +28,6 @@ import org.elasticsearch.script.Script;
import org.joda.time.DateTimeZone;

public final class ValuesSourceParserHelper {
    static final ParseField TIME_ZONE = new ParseField("time_zone");

    private ValuesSourceParserHelper() {} // utility class, no instantiation

@ -62,10 +61,10 @@ public final class ValuesSourceParserHelper {

        objectParser.declareField(ValuesSourceAggregationBuilder::field, XContentParser::text,
            new ParseField("field"), ObjectParser.ValueType.STRING);
            ParseField.CommonFields.FIELD, ObjectParser.ValueType.STRING);

        objectParser.declareField(ValuesSourceAggregationBuilder::missing, XContentParser::objectText,
            new ParseField("missing"), ObjectParser.ValueType.VALUE);
            ParseField.CommonFields.MISSING, ObjectParser.ValueType.VALUE);

        objectParser.declareField(ValuesSourceAggregationBuilder::valueType, p -> {
            ValueType valueType = ValueType.resolveForScript(p.text());

@ -76,11 +75,11 @@ public final class ValuesSourceParserHelper {
                    + targetValueType + "]");
            }
            return valueType;
        }, new ParseField("value_type", "valueType"), ObjectParser.ValueType.STRING);
        }, ValueType.VALUE_TYPE, ObjectParser.ValueType.STRING);

        if (formattable) {
            objectParser.declareField(ValuesSourceAggregationBuilder::format, XContentParser::text,
                new ParseField("format"), ObjectParser.ValueType.STRING);
                ParseField.CommonFields.FORMAT, ObjectParser.ValueType.STRING);
        }

        if (scriptable) {

@ -96,7 +95,7 @@ public final class ValuesSourceParserHelper {
            } else {
                return DateTimeZone.forOffsetHours(p.intValue());
            }
        }, TIME_ZONE, ObjectParser.ValueType.LONG);
        }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
    }
}
@ -19,9 +19,37 @@

package org.elasticsearch.search.aggregations.support;

public enum ValuesSourceType {
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Locale;

public enum ValuesSourceType implements Writeable {
    ANY,
    NUMERIC,
    BYTES,
    GEOPOINT;

    public static final ParseField VALUE_SOURCE_TYPE = new ParseField("value_source_type");

    public static ValuesSourceType fromString(String name) {
        return valueOf(name.trim().toUpperCase(Locale.ROOT));
    }

    public static ValuesSourceType fromStream(StreamInput in) throws IOException {
        return in.readEnum(ValuesSourceType.class);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        ValuesSourceType state = this;
        out.writeEnum(state);
    }

    public String value() {
        return name().toLowerCase(Locale.ROOT);
    }
}
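A quick sketch of the new enum helpers (the constants are from the enum above): fromString is case-insensitive and value() renders the lower-case name, so the two round-trip:

    ValuesSourceType type = ValuesSourceType.fromString("Numeric"); // -> NUMERIC
    assert type.value().equals("numeric");
    // fromStream/writeTo round-trip the constant over the transport protocol via readEnum/writeEnum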
@ -205,8 +205,15 @@ public class ReloadSecureSettingsIT extends ESIntegTestCase {
            assertThat(nodesMap.size(), equalTo(cluster().size()));
            for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) {
                assertThat(nodeResponse.reloadException(), notNullValue());
                // Running in a JVM with a BouncyCastle FIPS Security Provider, decrypting the Keystore with the wrong
                // password returns a SecurityException if the DataInputStream can't be fully consumed
                if (inFipsJvm()) {
                    assertThat(nodeResponse.reloadException(), instanceOf(SecurityException.class));
                } else {
                    assertThat(nodeResponse.reloadException(), instanceOf(IOException.class));
                }
            }
        } catch (final AssertionError e) {
            reloadSettingsError.set(e);
        } finally {
@ -27,6 +27,8 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Constants;
import org.elasticsearch.Version;

@ -112,6 +114,7 @@ import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.FieldMaskingReader;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.ElasticsearchException;

import java.io.IOException;
import java.nio.charset.Charset;

@ -138,6 +141,7 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.LongFunction;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@ -1162,6 +1166,81 @@ public class IndexShardTests extends IndexShardTestCase {
        closeShards(shard);
    }

    public void testShardStatsWithFailures() throws IOException {
        allowShardFailures();
        final ShardId shardId = new ShardId("index", "_na_", 0);
        final ShardRouting shardRouting = newShardRouting(shardId, "node", true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, ShardRoutingState.INITIALIZING);
        final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir());

        ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder(shardRouting.getIndexName())
            .settings(settings)
            .primaryTerm(0, 1)
            .build();

        // Override two Directory methods to make them fail at our will.
        // We use AtomicReference here to inject the failure in the middle of the test, not immediately.
        // We use Supplier<IOException> instead of IOException to produce a meaningful stacktrace
        // (remember the stack trace is filled in when the exception is instantiated).
        AtomicReference<Supplier<IOException>> exceptionToThrow = new AtomicReference<>();
        AtomicBoolean throwWhenMarkingStoreCorrupted = new AtomicBoolean(false);
        Directory directory = new FilterDirectory(newFSDirectory(shardPath.resolveIndex())) {
            // fileLength is called during the storeStats try block;
            // it's not called when the store is marked as corrupted
            @Override
            public long fileLength(String name) throws IOException {
                Supplier<IOException> ex = exceptionToThrow.get();
                if (ex == null) {
                    return super.fileLength(name);
                } else {
                    throw ex.get();
                }
            }

            // listAll is called when marking the store as corrupted
            @Override
            public String[] listAll() throws IOException {
                Supplier<IOException> ex = exceptionToThrow.get();
                if (throwWhenMarkingStoreCorrupted.get() && ex != null) {
                    throw ex.get();
                } else {
                    return super.listAll();
                }
            }
        };

        try (Store store = createStore(shardId, new IndexSettings(metaData, Settings.EMPTY), directory)) {
            IndexShard shard = newShard(shardRouting, shardPath, metaData, store,
                null, new InternalEngineFactory(), () -> {
                }, EMPTY_EVENT_LISTENER);
            AtomicBoolean failureCallbackTriggered = new AtomicBoolean(false);
            shard.addShardFailureCallback((ig) -> failureCallbackTriggered.set(true));

            recoverShardFromStore(shard);

            final boolean corruptIndexException = randomBoolean();

            if (corruptIndexException) {
                exceptionToThrow.set(() -> new CorruptIndexException("Test CorruptIndexException", "Test resource"));
                throwWhenMarkingStoreCorrupted.set(randomBoolean());
            } else {
                exceptionToThrow.set(() -> new IOException("Test IOException"));
            }
            ElasticsearchException e = expectThrows(ElasticsearchException.class, shard::storeStats);
            assertTrue(failureCallbackTriggered.get());

            if (corruptIndexException && !throwWhenMarkingStoreCorrupted.get()) {
                assertTrue(store.isMarkedCorrupted());
            }
        }
    }

    public void testRefreshMetric() throws IOException {
        IndexShard shard = newStartedShard();
        assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // refresh on: finalize and end of recovery
@ -1868,6 +1947,7 @@ public class IndexShardTests extends IndexShardTestCase {
            ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE),
            shard.shardPath(),
            shard.indexSettings().getIndexMetaData(),
            null,
            wrapper,
            new InternalEngineFactory(),
            () -> {},
@ -2020,6 +2100,7 @@ public class IndexShardTests extends IndexShardTestCase {
            ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE),
            shard.shardPath(),
            shard.indexSettings().getIndexMetaData(),
            null,
            wrapper,
            new InternalEngineFactory(),
            () -> {},
@ -2506,7 +2587,7 @@ public class IndexShardTests extends IndexShardTestCase {
                .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum", "fix")))
            .build();
        final IndexShard newShard = newShard(shardRouting, indexShard.shardPath(), indexMetaData,
            null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER);
            null, null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER);

        Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata();
        assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1);
@ -3005,7 +3086,7 @@ public class IndexShardTests extends IndexShardTestCase {
        ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);
        AtomicBoolean markedInactive = new AtomicBoolean();
        AtomicReference<IndexShard> primaryRef = new AtomicReference<>();
        IndexShard primary = newShard(shardRouting, shardPath, metaData, null, new InternalEngineFactory(), () -> {
        IndexShard primary = newShard(shardRouting, shardPath, metaData, null, null, new InternalEngineFactory(), () -> {
        }, new IndexEventListener() {
            @Override
            public void onShardInactive(IndexShard indexShard) {
@ -97,6 +97,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndicesClusterStateServiceTestCase {
        terminate(threadPool);
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32308")
    public void testRandomClusterStateUpdates() {
        // we have an IndicesClusterStateService per node in the cluster
        final Map<DiscoveryNode, IndicesClusterStateService> clusterStateServiceMap = new HashMap<>();
@ -105,6 +105,7 @@ public class BlobStoreRepositoryRestoreTests extends IndexShardTestCase {
            shard.shardPath(),
            shard.indexSettings().getIndexMetaData(),
            null,
            null,
            new InternalEngineFactory(),
            () -> {},
            EMPTY_EVENT_LISTENER);
@ -655,6 +655,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {

    public void testFieldAlias() throws IOException {
        int numRootDocs = randomIntBetween(1, 20);
        int expectedNestedDocs = 0;

        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
            NumberFieldMapper.NumberType.LONG);

@ -665,6 +666,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
        for (int i = 0; i < numRootDocs; i++) {
            List<Document> documents = new ArrayList<>();
            int numNestedDocs = randomIntBetween(0, 20);
            expectedNestedDocs += numNestedDocs;
            generateDocuments(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME);

            Document document = new Document();

@ -681,7 +683,6 @@ public class NestedAggregatorTests extends AggregatorTestCase {
        try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
            NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(
                max(MAX_AGG_NAME).field(VALUE_FIELD_NAME));

            NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(
                max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias"));

@ -690,8 +691,8 @@ public class NestedAggregatorTests extends AggregatorTestCase {
            Nested aliasNested = search(newSearcher(indexReader, false, true),
                new MatchAllDocsQuery(), aliasAgg, fieldType);

            assertTrue(nested.getDocCount() > 0);
            assertEquals(nested, aliasNested);
            assertEquals(expectedNestedDocs, nested.getDocCount());
        }
    }
}
@ -169,6 +169,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {

    public void testFieldAlias() throws IOException {
        int numParentDocs = randomIntBetween(1, 20);
        int expectedParentDocs = 0;

        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
            NumberFieldMapper.NumberType.LONG);

@ -179,6 +180,10 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
        for (int i = 0; i < numParentDocs; i++) {
            List<Document> documents = new ArrayList<>();
            int numNestedDocs = randomIntBetween(0, 20);
            if (numNestedDocs > 0) {
                expectedParentDocs++;
            }

            for (int nested = 0; nested < numNestedDocs; nested++) {
                Document document = new Document();
                document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)),

@ -203,7 +208,6 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
        }

        try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
            MaxAggregationBuilder maxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME);
            MaxAggregationBuilder aliasMaxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias");

@ -220,8 +224,8 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
            ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME);
            ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME);

            assertTrue(reverseNested.getDocCount() > 0);
            assertEquals(reverseNested, aliasReverseNested);
            assertEquals(expectedParentDocs, reverseNested.getDocCount());
        }
    }
}
@ -0,0 +1,428 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.metrics.weighted_avg;
|
||||
|
||||
import org.apache.lucene.document.IntPoint;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
import org.apache.lucene.document.SortedNumericDocValuesField;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.RandomIndexWriter;
|
||||
import org.apache.lucene.search.DocValuesFieldExistsQuery;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.common.CheckedConsumer;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.AggregatorTestCase;
|
||||
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import static java.util.Collections.singleton;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class WeightedAvgAggregatorTests extends AggregatorTestCase {
|
||||
|
||||
public void testNoDocs() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
|
||||
// Intentionally not writing any docs
|
||||
}, avg -> {
|
||||
assertEquals(Double.NaN, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testNoMatchingField() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
|
||||
iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7)));
|
||||
iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 3)));
|
||||
}, avg -> {
|
||||
assertEquals(Double.NaN, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testSomeMatchesSortedNumericDocValuesNoWeight() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
}, avg -> {
|
||||
assertEquals(4, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testSomeMatchesSortedNumericDocValuesWeights() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 7),
|
||||
new SortedNumericDocValuesField("weight_field", 2)));
|
||||
iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2),
|
||||
new SortedNumericDocValuesField("weight_field", 3)));
|
||||
iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3),
|
||||
new SortedNumericDocValuesField("weight_field", 3)));
|
||||
|
||||
}, avg -> {
|
||||
// (7*2 + 2*3 + 3*3) / (2+3+3) == 3.625
|
||||
assertEquals(3.625, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testSomeMatchesNumericDocValues() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new DocValuesFieldExistsQuery("value_field"), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 7),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 2),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", 3),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
}, avg -> {
|
||||
assertEquals(4, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testQueryFiltering() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(IntPoint.newRangeQuery("value_field", 0, 3), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new IntPoint("value_field", 7), new SortedNumericDocValuesField("value_field", 7),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("value_field", 1), new SortedNumericDocValuesField("value_field", 2),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("value_field", 3), new SortedNumericDocValuesField("value_field", 3),
|
||||
new SortedNumericDocValuesField("weight_field", 1)));
|
||||
}, avg -> {
|
||||
assertEquals(2.5, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testQueryFilteringWeights() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(IntPoint.newRangeQuery("filter_field", 0, 3), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new IntPoint("filter_field", 7), new SortedNumericDocValuesField("value_field", 7),
|
||||
new SortedNumericDocValuesField("weight_field", 2)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("filter_field", 2), new SortedNumericDocValuesField("value_field", 2),
|
||||
new SortedNumericDocValuesField("weight_field", 3)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("filter_field", 3), new SortedNumericDocValuesField("value_field", 3),
|
||||
new SortedNumericDocValuesField("weight_field", 4)));
|
||||
}, avg -> {
|
||||
double value = (2.0*3.0 + 3.0*4.0) / (3.0+4.0);
|
||||
assertEquals(value, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testQueryFiltersAll() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(IntPoint.newRangeQuery("value_field", -1, 0), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new IntPoint("value_field", 7), new SortedNumericDocValuesField("value_field", 7)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("value_field", 1), new SortedNumericDocValuesField("value_field", 2)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("value_field", 3), new SortedNumericDocValuesField("value_field", 7)));
|
||||
}, avg -> {
|
||||
assertEquals(Double.NaN, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testQueryFiltersAllWeights() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(IntPoint.newRangeQuery("value_field", -1, 0), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Arrays.asList(new IntPoint("filter_field", 7), new SortedNumericDocValuesField("value_field", 7),
|
||||
new SortedNumericDocValuesField("weight_field", 2)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("filter_field", 2), new SortedNumericDocValuesField("value_field", 2),
|
||||
new SortedNumericDocValuesField("weight_field", 3)));
|
||||
iw.addDocument(Arrays.asList(new IntPoint("filter_field", 3), new SortedNumericDocValuesField("value_field", 3),
|
||||
new SortedNumericDocValuesField("weight_field", 4)));
|
||||
}, avg -> {
|
||||
assertEquals(Double.NaN, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testValueSetMissing() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder()
|
||||
.setFieldName("value_field")
|
||||
.setMissing(2)
|
||||
.build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 2)));
|
||||
iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 3)));
|
||||
iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("weight_field", 4)));
|
||||
}, avg -> {
|
||||
double value = (2.0*2.0 + 2.0*3.0 + 2.0*4.0) / (2.0+3.0+4.0);
|
||||
assertEquals(value, avg.getValue(), 0);
|
||||
});
|
||||
}
|
||||
|
||||
public void testWeightSetMissing() throws IOException {
|
||||
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
|
||||
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder()
|
||||
.setFieldName("weight_field")
|
||||
.setMissing(2)
|
||||
.build();
|
||||
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
|
||||
.value(valueConfig)
|
||||
.weight(weightConfig);
|
||||
testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
|
||||
iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 2)));
|
||||
iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 3)));
|
||||
iw.addDocument(Collections.singletonList(new SortedNumericDocValuesField("value_field", 4)));
|
||||
}, avg -> {
|
||||
double value = (2.0*2.0 + 3.0*2.0 + 4.0*2.0) / (2.0+2.0+2.0);
|
||||
assertEquals(value, avg.getValue(), 0);
|
||||
});
|
||||
}

    public void testWeightSetTimezone() throws IOException {
        MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
        MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder()
            .setFieldName("weight_field")
            .setTimeZone(DateTimeZone.UTC)
            .build();
        WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
            .value(valueConfig)
            .weight(weightConfig);

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
                iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2),
                    new SortedNumericDocValuesField("weight_field", 1)));
                iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3),
                    new SortedNumericDocValuesField("weight_field", 1)));
                iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4),
                    new SortedNumericDocValuesField("weight_field", 1)));
            }, avg -> {
                fail("Should not have executed test case");
            }));
        assertThat(e.getMessage(), equalTo("Field [weight_field] of type [long] does not support custom time zones"));
    }

    public void testValueSetTimezone() throws IOException {
        MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder()
            .setFieldName("value_field")
            .setTimeZone(DateTimeZone.UTC)
            .build();
        MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
        WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
            .value(valueConfig)
            .weight(weightConfig);

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
                iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2),
                    new SortedNumericDocValuesField("weight_field", 1)));
                iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3),
                    new SortedNumericDocValuesField("weight_field", 1)));
                iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4),
                    new SortedNumericDocValuesField("weight_field", 1)));
            }, avg -> {
                fail("Should not have executed test case");
            }));
        assertThat(e.getMessage(), equalTo("Field [value_field] of type [long] does not support custom time zones"));
    }

    public void testMultiValues() throws IOException {
        MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder()
            .setFieldName("value_field")
            .build();
        MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
        WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
            .value(valueConfig)
            .weight(weightConfig);

        testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
            iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 2),
                new SortedNumericDocValuesField("value_field", 3), new SortedNumericDocValuesField("weight_field", 1)));
            iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 3),
                new SortedNumericDocValuesField("value_field", 4), new SortedNumericDocValuesField("weight_field", 1)));
            iw.addDocument(Arrays.asList(new SortedNumericDocValuesField("value_field", 4),
                new SortedNumericDocValuesField("value_field", 5), new SortedNumericDocValuesField("weight_field", 1)));
        }, avg -> {
            double value = (((2.0+3.0)/2.0) + ((3.0+4.0)/2.0) + ((4.0+5.0)/2.0)) / (1.0+1.0+1.0);
            assertEquals(value, avg.getValue(), 0);
        });
    }
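
The expected value in testMultiValues treats each document's multi-valued field as contributing the mean of its values at the document's weight; with unit weights that reduces to an average of per-document means, (2.5 + 3.5 + 4.5) / 3 = 3.5. A hedged re-computation (hypothetical class, mirroring the documents indexed above):

    public class WeightedAvgMultiValueSketch {
        public static void main(String[] args) {
            double[][] docValues = {{2, 3}, {3, 4}, {4, 5}};   // the three documents above
            double sumOfMeans = 0;
            for (double[] values : docValues) {
                sumOfMeans += (values[0] + values[1]) / 2.0;   // per-document mean
            }
            System.out.println(sumOfMeans / docValues.length); // 3.5, matching the assertion
        }
    }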

    public void testMultiWeight() throws IOException {
        MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
        MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder()
            .setFieldName("weight_field")
            .build();
        WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
            .value(valueConfig)
            .weight(weightConfig);

        AggregationExecutionException e = expectThrows(AggregationExecutionException.class,
            () -> testCase(new MatchAllDocsQuery(), aggregationBuilder, iw -> {
                iw.addDocument(Arrays.asList(
                    new SortedNumericDocValuesField("value_field", 2),
                    new SortedNumericDocValuesField("weight_field", 2), new SortedNumericDocValuesField("weight_field", 3)));
                iw.addDocument(Arrays.asList(
                    new SortedNumericDocValuesField("value_field", 3),
                    new SortedNumericDocValuesField("weight_field", 3), new SortedNumericDocValuesField("weight_field", 4)));
                iw.addDocument(Arrays.asList(
                    new SortedNumericDocValuesField("value_field", 4),
                    new SortedNumericDocValuesField("weight_field", 4), new SortedNumericDocValuesField("weight_field", 5)));
            }, avg -> {
                fail("Should have thrown exception");
            }));
        assertThat(e.getMessage(), containsString("Encountered more than one weight for a single document. " +
            "Use a script to combine multiple weights-per-doc into a single value."));
    }


    public void testSummationAccuracy() throws IOException {
        // Summing up a normal array and expect an accurate value
        double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7};
        verifyAvgOfDoubles(values, 0.9, 0d);

        // Summing up an array which contains NaN and infinities and expect a result same as naive summation
        int n = randomIntBetween(5, 10);
        values = new double[n];
        double sum = 0;
        for (int i = 0; i < n; i++) {
            values[i] = frequently()
                ? randomFrom(Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY)
                : randomDoubleBetween(Double.MIN_VALUE, Double.MAX_VALUE, true);
            sum += values[i];
        }
        verifyAvgOfDoubles(values, sum / n, 1e-10);

        // Summing up some big double values and expect infinity result
        n = randomIntBetween(5, 10);
        double[] largeValues = new double[n];
        for (int i = 0; i < n; i++) {
            largeValues[i] = Double.MAX_VALUE;
        }
        verifyAvgOfDoubles(largeValues, Double.POSITIVE_INFINITY, 0d);

        for (int i = 0; i < n; i++) {
            largeValues[i] = -Double.MAX_VALUE;
        }
        verifyAvgOfDoubles(largeValues, Double.NEGATIVE_INFINITY, 0d);
    }
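
The zero-delta expectation on the first array holds because the avg metrics use compensated (Kahan) summation, which carries the low-order bits that naive addition of 0.1, 0.2, ... would drop. A minimal sketch of the technique (assumed illustration; the production code in the aggregation framework is more involved):

    public class KahanSketch {
        static double kahanSum(double[] values) {
            double sum = 0.0;
            double compensation = 0.0;                      // running low-order error
            for (double v : values) {
                double corrected = v - compensation;
                double newSum = sum + corrected;            // low-order bits of corrected may be lost here
                compensation = (newSum - sum) - corrected;  // recover them for the next round
                sum = newSum;
            }
            return sum;
        }

        public static void main(String[] args) {
            double[] values = {0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7};
            System.out.println(kahanSum(values) / values.length);   // 0.9, as the test expects
        }
    }

Once NaN or an infinity enters the stream the compensation term becomes meaningless, which is why the test only requires agreement with naive summation (to a 1e-10 delta) in that branch.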

    private void verifyAvgOfDoubles(double[] values, double expected, double delta) throws IOException {
        MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
        MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
        WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
            .value(valueConfig)
            .weight(weightConfig);
        testCase(new MatchAllDocsQuery(), aggregationBuilder,
            iw -> {
                for (double value : values) {
                    iw.addDocument(Arrays.asList(new NumericDocValuesField("value_field", NumericUtils.doubleToSortableLong(value)),
                        new SortedNumericDocValuesField("weight_field", NumericUtils.doubleToSortableLong(1.0))));
                }
            },
            avg -> assertEquals(expected, avg.getValue(), delta),
            NumberFieldMapper.NumberType.DOUBLE
        );
    }

    private void testCase(Query query, WeightedAvgAggregationBuilder aggregationBuilder,
                          CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                          Consumer<InternalWeightedAvg> verify) throws IOException {
        testCase(query, aggregationBuilder, buildIndex, verify, NumberFieldMapper.NumberType.LONG);
    }

    private void testCase(Query query, WeightedAvgAggregationBuilder aggregationBuilder,
                          CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                          Consumer<InternalWeightedAvg> verify,
                          NumberFieldMapper.NumberType fieldNumberType) throws IOException {

        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        buildIndex.accept(indexWriter);
        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        try {
            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(fieldNumberType);
            fieldType.setName("value_field");
            fieldType.setHasDocValues(true);

            MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(fieldNumberType);
            fieldType2.setName("weight_field");
            fieldType2.setHasDocValues(true);

            WeightedAvgAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType, fieldType2);
            aggregator.preCollection();
            indexSearcher.search(query, aggregator);
            aggregator.postCollection();
            verify.accept((InternalWeightedAvg) aggregator.buildAggregation(0L));
        } finally {
            indexReader.close();
            directory.close();
        }
    }
}

@ -0,0 +1,38 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.script.Script;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.equalTo;

public class MultiValuesSourceFieldConfigTests extends ESTestCase {
    public void testMissingFieldScript() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MultiValuesSourceFieldConfig.Builder().build());
        assertThat(e.getMessage(), equalTo("[field] and [script] cannot both be null. Please specify one or the other."));
    }

    public void testBothFieldScript() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> new MultiValuesSourceFieldConfig.Builder().setFieldName("foo").setScript(new Script("foo")).build());
        assertThat(e.getMessage(), equalTo("[field] and [script] cannot both be configured. Please specify one or the other."));
    }
}
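
For contrast with the two failure modes asserted above, a valid configuration names exactly one source. A small sketch using only builder calls that appear in this diff (the script body is illustrative):

    // Exactly one of [field] or [script] must be set.
    MultiValuesSourceFieldConfig byField = new MultiValuesSourceFieldConfig.Builder()
        .setFieldName("value_field")                          // field-backed source
        .build();
    MultiValuesSourceFieldConfig byScript = new MultiValuesSourceFieldConfig.Builder()
        .setScript(new Script("doc['value_field'].value"))    // script-backed source
        .build();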

@ -265,7 +265,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
        RecoverySource.PeerRecoverySource.INSTANCE);

        final IndexShard newReplica =
            newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER);
            newShard(shardRouting, shardPath, indexMetaData, null, null, getEngineFactory(shardRouting),
                () -> {}, EMPTY_EVENT_LISTENER);
        replicas.add(newReplica);
        updateAllocationIDsOnPrimary();
        return newReplica;

@ -163,15 +163,20 @@ public abstract class IndexShardTestCase extends ESTestCase {
        return Settings.EMPTY;
    }

    private Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException {
        final ShardId shardId = shardPath.getShardId();

    protected Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException {
        return createStore(shardPath.getShardId(), indexSettings, newFSDirectory(shardPath.resolveIndex()));
    }

    protected Store createStore(ShardId shardId, IndexSettings indexSettings, Directory directory) throws IOException {
        final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) {
            @Override
            public Directory newDirectory() throws IOException {
                return newFSDirectory(shardPath.resolveIndex());
                return directory;
            }
        };
        return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId));

    }
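
The refactor above splits store creation so tests can inject an arbitrary Lucene Directory rather than always getting a filesystem directory derived from the ShardPath. A hedged usage sketch (the RAMDirectory choice is illustrative, not taken from this diff):

    // Hypothetical caller: back the Store with an in-memory directory.
    Directory directory = new RAMDirectory();   // org.apache.lucene.store.RAMDirectory
    Store store = createStore(shardPath.getShardId(), indexSettings, directory);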

    /**

@ -284,7 +289,7 @@ public abstract class IndexShardTestCase extends ESTestCase {
        final ShardId shardId = routing.shardId();
        final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir());
        ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);
        return newShard(routing, shardPath, indexMetaData, indexSearcherWrapper, engineFactory, globalCheckpointSyncer,
        return newShard(routing, shardPath, indexMetaData, null, indexSearcherWrapper, engineFactory, globalCheckpointSyncer,
            EMPTY_EVENT_LISTENER, listeners);
    }

@ -293,20 +298,23 @@ public abstract class IndexShardTestCase extends ESTestCase {
     * @param routing                shard routing to use
     * @param shardPath              path to use for shard data
     * @param indexMetaData          indexMetaData for the shard, including any mapping
     * @param store                  an optional custom store to use. If null a default file based store will be created
     * @param indexSearcherWrapper   an optional wrapper to be used during searchers
     * @param globalCheckpointSyncer callback for syncing global checkpoints
     * @param indexEventListener     index even listener
     * @param indexEventListener     index event listener
     * @param listeners              an optional set of listeners to add to the shard
     */
    protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData,
                                  @Nullable IndexSearcherWrapper indexSearcherWrapper,
                                  @Nullable Store store, @Nullable IndexSearcherWrapper indexSearcherWrapper,
                                  @Nullable EngineFactory engineFactory,
                                  Runnable globalCheckpointSyncer,
                                  IndexEventListener indexEventListener, IndexingOperationListener... listeners) throws IOException {
        final Settings nodeSettings = Settings.builder().put("node.name", routing.currentNodeId()).build();
        final IndexSettings indexSettings = new IndexSettings(indexMetaData, nodeSettings);
        final IndexShard indexShard;
        final Store store = createStore(indexSettings, shardPath);
        if (store == null) {
            store = createStore(indexSettings, shardPath);
        }
        boolean success = false;
        try {
            IndexCache indexCache = new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null);

@ -357,6 +365,7 @@ public abstract class IndexShardTestCase extends ESTestCase {
            current.shardPath(),
            current.indexSettings().getIndexMetaData(),
            null,
            null,
            current.engineFactory,
            current.getGlobalCheckpointSyncer(),
            EMPTY_EVENT_LISTENER, listeners);

@ -176,6 +176,7 @@ import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.Security;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

@ -2364,4 +2365,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
        });
    }

    public static boolean inFipsJvm() {
        return Security.getProviders()[0].getName().toLowerCase(Locale.ROOT).contains("fips");
    }
}
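
inFipsJvm() keys off the name of the highest-priority installed JCE provider, which on a FIPS-enabled JVM is a FIPS provider such as BouncyCastle FIPS. A standalone probe of the same check (the provider names printed depend entirely on the JVM's java.security configuration):

    import java.security.Provider;
    import java.security.Security;
    import java.util.Locale;

    public class FipsProbe {
        public static void main(String[] args) {
            Provider first = Security.getProviders()[0];   // highest-priority provider
            System.out.println(first.getName());           // e.g. "SUN", or "BCFIPS" on a FIPS JVM
            System.out.println(first.getName().toLowerCase(Locale.ROOT).contains("fips"));
        }
    }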

@ -33,7 +33,7 @@ live?

==== Request Body

There is no request body for the Get Jobs API.
There is no request body for the Get Rollup Caps API.

==== Authorization


@ -104,39 +104,28 @@ integTestRunner {
systemProperty 'tests.rest.blacklist', blacklist.join(',')
}

// location of generated keystores and certificates
// location for keys and certificates
File keystoreDir = new File(project.buildDir, 'keystore')
File nodeKey = file("$keystoreDir/testnode.pem")
File nodeCert = file("$keystoreDir/testnode.crt")

// Generate the node's keystore
File nodeKeystore = new File(keystoreDir, 'test-node.jks')
task createNodeKeyStore(type: LoggedExec) {
  doFirst {
    if (nodeKeystore.parentFile.exists() == false) {
      nodeKeystore.parentFile.mkdirs()
// Add key and certs to test classpath: it expects them there
// Use cert and key PEM files instead of a JKS Keystore for the cluster's trust material so that
// it can run in a FIPS 140 JVM
// TODO: Remove all existing uses of cross project file references when the new approach for referencing static files is available
// https://github.com/elastic/elasticsearch/pull/32201
task copyKeyCerts(type: Copy) {
  from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) {
    include 'testnode.crt', 'testnode.pem'
  }
  if (nodeKeystore.exists()) {
    delete nodeKeystore
  into keystoreDir
  }
}
executable = new File(project.runtimeJavaHome, 'bin/keytool')
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
args '-genkey',
  '-alias', 'test-node',
  '-keystore', nodeKeystore,
  '-keyalg', 'RSA',
  '-keysize', '2048',
  '-validity', '712',
  '-dname', 'CN=smoke-test-plugins-ssl',
  '-keypass', 'keypass',
  '-storepass', 'keypass'
}

// Add keystores to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(createNodeKeyStore)
processTestResources.dependsOn(copyKeyCerts)

integTestCluster {
  dependsOn createNodeKeyStore
  dependsOn copyKeyCerts
  setting 'xpack.ml.enabled', 'true'
  setting 'xpack.security.enabled', 'true'
  setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE'

@ -145,17 +134,19 @@ integTestCluster {
  setting 'xpack.monitoring.exporters._local.enabled', 'false'
  setting 'xpack.security.authc.token.enabled', 'true'
  setting 'xpack.security.transport.ssl.enabled', 'true'
  setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name
  setting 'xpack.security.transport.ssl.key', nodeKey.name
  setting 'xpack.security.transport.ssl.certificate', nodeCert.name
  setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
  setting 'xpack.security.audit.enabled', 'true'
  setting 'xpack.license.self_generated.type', 'trial'
  keystoreSetting 'bootstrap.password', 'x-pack-test-password'
  keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
  keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode'
  distribution = 'zip' // this is important since we use the reindex module in ML

  setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'

  extraConfigFile nodeKeystore.name, nodeKeystore
  extraConfigFile nodeKey.name, nodeKey
  extraConfigFile nodeCert.name, nodeCert

  waitCondition = { NodeInfo node, AntBuilder ant ->
    File tmpFile = new File(node.cwd, 'wait.success')

@ -5,6 +5,7 @@
*/
package org.elasticsearch.license;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

@ -67,12 +68,14 @@ public class StartBasicLicenseTests extends AbstractLicensesIntegrationTestCase
        }

        RestClient restClient = getRestClient();
        Response response = restClient.performRequest("GET", "/_xpack/license/basic_status");
        Response response = restClient.performRequest(new Request("GET", "/_xpack/license/basic_status"));
        String body = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals("{\"eligible_to_start_basic\":true}", body);

        Response response2 = restClient.performRequest("POST", "/_xpack/license/start_basic?acknowledge=true");
        Request ackRequest = new Request("POST", "/_xpack/license/start_basic");
        ackRequest.addParameter("acknowledge", "true");
        Response response2 = restClient.performRequest(ackRequest);
        String body2 = Streams.copyToString(new InputStreamReader(response2.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response2.getStatusLine().getStatusCode());
        assertTrue(body2.contains("\"acknowledged\":true"));
@ -86,20 +89,19 @@ public class StartBasicLicenseTests extends AbstractLicensesIntegrationTestCase
        long expirationMillis = licensingClient.prepareGetLicense().get().license().expiryDate();
        assertEquals(LicenseService.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS, expirationMillis);

        Response response3 = restClient.performRequest("GET", "/_xpack/license");
        Response response3 = restClient.performRequest(new Request("GET", "/_xpack/license"));
        String body3 = Streams.copyToString(new InputStreamReader(response3.getEntity().getContent(), StandardCharsets.UTF_8));
        assertTrue(body3.contains("\"type\" : \"basic\""));
        assertFalse(body3.contains("expiry_date"));
        assertFalse(body3.contains("expiry_date_in_millis"));


        Response response4 = restClient.performRequest("GET", "/_xpack/license/basic_status");
        Response response4 = restClient.performRequest(new Request("GET", "/_xpack/license/basic_status"));
        String body4 = Streams.copyToString(new InputStreamReader(response4.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response3.getStatusLine().getStatusCode());
        assertEquals("{\"eligible_to_start_basic\":false}", body4);

        ResponseException ex = expectThrows(ResponseException.class,
            () -> restClient.performRequest("POST", "/_xpack/license/start_basic"));
            () -> restClient.performRequest(new Request("POST", "/_xpack/license/start_basic")));
        Response response5 = ex.getResponse();
        String body5 = Streams.copyToString(new InputStreamReader(response5.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(403, response5.getStatusLine().getStatusCode());

@ -118,7 +120,7 @@ public class StartBasicLicenseTests extends AbstractLicensesIntegrationTestCase
            assertEquals("trial", getLicenseResponse.license().type());
        });

        Response response2 = getRestClient().performRequest("POST", "/_xpack/license/start_basic");
        Response response2 = getRestClient().performRequest(new Request("POST", "/_xpack/license/start_basic"));
        String body2 = Streams.copyToString(new InputStreamReader(response2.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response2.getStatusLine().getStatusCode());
        assertTrue(body2.contains("\"acknowledged\":false"));

@ -5,6 +5,7 @@
*/
package org.elasticsearch.license;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

@ -54,13 +55,13 @@ public class StartTrialLicenseTests extends AbstractLicensesIntegrationTestCase
        ensureStartingWithBasic();

        RestClient restClient = getRestClient();
        Response response = restClient.performRequest("GET", "/_xpack/license/trial_status");
        Response response = restClient.performRequest(new Request("GET", "/_xpack/license/trial_status"));
        String body = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals("{\"eligible_to_start_trial\":true}", body);

        // Test that starting will fail without acknowledgement
        Response response2 = restClient.performRequest("POST", "/_xpack/license/start_trial");
        Response response2 = restClient.performRequest(new Request("POST", "/_xpack/license/start_trial"));
        String body2 = Streams.copyToString(new InputStreamReader(response2.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response2.getStatusLine().getStatusCode());
        assertTrue(body2.contains("\"trial_was_started\":false"));

@ -74,7 +75,10 @@ public class StartTrialLicenseTests extends AbstractLicensesIntegrationTestCase

        String type = randomFrom(LicenseService.VALID_TRIAL_TYPES);

        Response response3 = restClient.performRequest("POST", "/_xpack/license/start_trial?acknowledge=true&type=" + type);
        Request ackRequest = new Request("POST", "/_xpack/license/start_trial");
        ackRequest.addParameter("acknowledge", "true");
        ackRequest.addParameter("type", type);
        Response response3 = restClient.performRequest(ackRequest);
        String body3 = Streams.copyToString(new InputStreamReader(response3.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response3.getStatusLine().getStatusCode());
        assertTrue(body3.contains("\"trial_was_started\":true"));

@ -86,15 +90,17 @@ public class StartTrialLicenseTests extends AbstractLicensesIntegrationTestCase
            assertEquals(type, postTrialLicenseResponse.license().type());
        });

        Response response4 = restClient.performRequest("GET", "/_xpack/license/trial_status");
        Response response4 = restClient.performRequest(new Request("GET", "/_xpack/license/trial_status"));
        String body4 = Streams.copyToString(new InputStreamReader(response4.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(200, response4.getStatusLine().getStatusCode());
        assertEquals("{\"eligible_to_start_trial\":false}", body4);

        String secondAttemptType = randomFrom(LicenseService.VALID_TRIAL_TYPES);

        ResponseException ex = expectThrows(ResponseException.class,
            () -> restClient.performRequest("POST", "/_xpack/license/start_trial?acknowledge=true&type=" + secondAttemptType));
        Request startTrialWhenStartedRequest = new Request("POST", "/_xpack/license/start_trial");
        startTrialWhenStartedRequest.addParameter("acknowledge", "true");
        startTrialWhenStartedRequest.addParameter("type", secondAttemptType);
        ResponseException ex = expectThrows(ResponseException.class, () -> restClient.performRequest(startTrialWhenStartedRequest));
        Response response5 = ex.getResponse();
        String body5 = Streams.copyToString(new InputStreamReader(response5.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(403, response5.getStatusLine().getStatusCode());

@ -105,8 +111,9 @@ public class StartTrialLicenseTests extends AbstractLicensesIntegrationTestCase
    public void testInvalidType() throws Exception {
        ensureStartingWithBasic();

        ResponseException ex = expectThrows(ResponseException.class, () ->
            getRestClient().performRequest("POST", "/_xpack/license/start_trial?type=basic"));
        Request request = new Request("POST", "/_xpack/license/start_trial");
        request.addParameter("type", "basic");
        ResponseException ex = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request));
        Response response = ex.getResponse();
        String body = Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8));
        assertEquals(400, response.getStatusLine().getStatusCode());
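
The pattern running through these license tests is the migration from the deprecated performRequest(method, endpoint) overloads to explicit Request objects; parameters move out of hand-built query strings and into addParameter, which handles encoding. A minimal sketch of the two styles (endpoint reused from the tests above):

    // Old style: parameters baked into the query string.
    Response before = restClient.performRequest("POST", "/_xpack/license/start_trial?acknowledge=true");

    // New style: an explicit Request object.
    Request request = new Request("POST", "/_xpack/license/start_trial");
    request.addParameter("acknowledge", "true");
    Response after = restClient.performRequest(request);
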
@ -44,20 +44,11 @@ public class MlRestTestStateCleaner {
    }

        try {
            int statusCode = adminClient.performRequest("POST", "/_xpack/ml/datafeeds/_all/_stop")
                .getStatusLine().getStatusCode();
            if (statusCode != 200) {
                logger.error("Got status code " + statusCode + " when stopping datafeeds");
            }
            adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop"));
        } catch (Exception e1) {
            logger.warn("failed to stop all datafeeds. Forcing stop", e1);
            try {
                int statusCode = adminClient
                    .performRequest("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")
                    .getStatusLine().getStatusCode();
                if (statusCode != 200) {
                    logger.error("Got status code " + statusCode + " when stopping datafeeds");
                }
                adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true"));
            } catch (Exception e2) {
                logger.warn("Force-closing all data feeds failed", e2);
            }

@ -67,10 +58,7 @@ public class MlRestTestStateCleaner {

        for (Map<String, Object> datafeed : datafeeds) {
            String datafeedId = (String) datafeed.get("datafeed_id");
            int statusCode = adminClient.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId).getStatusLine().getStatusCode();
            if (statusCode != 200) {
                logger.error("Got status code " + statusCode + " when deleting datafeed " + datafeedId);
            }
            adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId));
        }
    }

@ -86,17 +74,11 @@ public class MlRestTestStateCleaner {
    }

        try {
            int statusCode = adminClient
                .performRequest("POST", "/_xpack/ml/anomaly_detectors/_all/_close")
                .getStatusLine().getStatusCode();
            if (statusCode != 200) {
                logger.error("Got status code " + statusCode + " when closing all jobs");
            }
            adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close"));
        } catch (Exception e1) {
            logger.warn("failed to close all jobs. Forcing closed", e1);
            try {
                adminClient.performRequest("POST",
                    "/_xpack/ml/anomaly_detectors/_all/_close?force=true");
                adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true"));
            } catch (Exception e2) {
                logger.warn("Force-closing all jobs failed", e2);
            }

@ -106,10 +88,7 @@ public class MlRestTestStateCleaner {

        for (Map<String, Object> jobConfig : jobConfigs) {
            String jobId = (String) jobConfig.get("job_id");
            int statusCode = adminClient.performRequest("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId).getStatusLine().getStatusCode();
            if (statusCode != 200) {
                logger.error("Got status code " + statusCode + " when deleting job " + jobId);
            }
            adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId));
        }
    }
}
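
Dropping the manual status-code checks is safe because performRequest already surfaces any non-2xx response as a ResponseException, which the surrounding catch blocks log. A hedged sketch of that contract:

    try {
        adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop"));
    } catch (ResponseException e) {
        // The response (status line and body) still rides along on the exception.
        logger.warn("stop failed with status " + e.getResponse().getStatusLine().getStatusCode(), e);
    }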

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.rollup;

import org.apache.http.HttpStatus;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.xcontent.support.XContentMapValues;

@ -17,7 +18,6 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@ -35,8 +35,9 @@ public class RollupRestTestStateCleaner {
    private static void waitForPendingTasks(RestClient adminClient) throws Exception {
        ESTestCase.assertBusy(() -> {
            try {
                Response response = adminClient.performRequest("GET", "/_cat/tasks",
                    Collections.singletonMap("detailed", "true"));
                Request request = new Request("GET", "/_cat/tasks");
                request.addParameter("detailed", "true");
                Response response = adminClient.performRequest(request);
                if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                    try (BufferedReader responseReader = new BufferedReader(
                        new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {

@ -63,7 +64,7 @@ public class RollupRestTestStateCleaner {

    @SuppressWarnings("unchecked")
    private static void deleteAllJobs(RestClient adminClient) throws Exception {
        Response response = adminClient.performRequest("GET", "/_xpack/rollup/job/_all");
        Response response = adminClient.performRequest(new Request("GET", "/_xpack/rollup/job/_all"));
        Map<String, Object> jobs = ESRestTestCase.entityAsMap(response);
        List<Map<String, Object>> jobConfigs =
            (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs);

@ -75,7 +76,7 @@ public class RollupRestTestStateCleaner {
        for (Map<String, Object> jobConfig : jobConfigs) {
            String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
            try {
                response = adminClient.performRequest("DELETE", "/_xpack/rollup/job/" + jobId);
                response = adminClient.performRequest(new Request("DELETE", "/_xpack/rollup/job/" + jobId));
            } catch (Exception e) {
                // ok
            }

@ -78,6 +78,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase {
    /**
     * Tests reloading a keystore that is used in the KeyManager of SSLContext
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32124")
    public void testReloadingKeyStore() throws Exception {
        assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
        final Path tempDir = createTempDir();

@ -191,6 +192,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase {
     * Tests the reloading of SSLContext when the trust store is modified. The same store is used as a TrustStore (for the
     * reloadable SSLContext used in the HTTPClient) and as a KeyStore for the MockWebServer
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32124")
    public void testReloadingTrustStore() throws Exception {
        assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
        Path tempDir = createTempDir();

@ -10,6 +10,7 @@ import org.apache.http.HttpStatus;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

@ -25,12 +26,10 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

import static java.util.Collections.singletonMap;
import static org.junit.Assert.assertEquals;

public final class XPackRestTestHelper {

@ -47,8 +46,9 @@ public final class XPackRestTestHelper {
        ESTestCase.awaitBusy(() -> {
            String response;
            try {
                response = EntityUtils
                    .toString(client.performRequest("GET", "/_cat/nodes", singletonMap("h", "master,version")).getEntity());
                Request request = new Request("GET", "/_cat/nodes");
                request.addParameter("h", "master,version");
                response = EntityUtils.toString(client.performRequest(request).getEntity());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }

@ -67,7 +67,7 @@ public final class XPackRestTestHelper {
        ESTestCase.awaitBusy(() -> {
            Map<?, ?> response;
            try {
                String string = EntityUtils.toString(client.performRequest("GET", "/_template/" + template).getEntity());
                String string = EntityUtils.toString(client.performRequest(new Request("GET", "/_template/" + template)).getEntity());
                response = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false);
            } catch (ResponseException e) {
                if (e.getResponse().getStatusLine().getStatusCode() == 404) {

@ -89,8 +89,9 @@ public final class XPackRestTestHelper {
    public static void waitForPendingTasks(RestClient adminClient) throws Exception {
        ESTestCase.assertBusy(() -> {
            try {
                Response response = adminClient.performRequest("GET", "/_cat/tasks",
                    Collections.singletonMap("detailed", "true"));
                Request request = new Request("GET", "/_cat/tasks");
                request.addParameter("detailed", "true");
                Response response = adminClient.performRequest(request);
                // Check to see if there are tasks still active. We exclude the
                // list tasks
                // actions tasks from this otherwise we will always fail

@ -9,6 +9,7 @@ import org.apache.http.HttpEntity;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

@ -311,13 +312,15 @@ public abstract class PublishableHttpResource extends HttpResource {
                                                                     final Set<Integer> exists, final Set<Integer> doesNotExist) {
        logger.trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, resourceOwnerName, resourceOwnerType);

        final Set<Integer> expectedResponseCodes = Sets.union(exists, doesNotExist);

        final Request request = new Request("GET", resourceBasePath + "/" + resourceName);
        addParameters(request);
        // avoid exists and DNE parameters from being an exception by default
        final Map<String, String> getParameters = new HashMap<>(parameters);
        getParameters.put("ignore", expectedResponseCodes.stream().map(i -> i.toString()).collect(Collectors.joining(",")));
        final Set<Integer> expectedResponseCodes = Sets.union(exists, doesNotExist);
        request.addParameter("ignore", expectedResponseCodes.stream().map(i -> i.toString()).collect(Collectors.joining(",")));

        try {
            final Response response = client.performRequest("GET", resourceBasePath + "/" + resourceName, getParameters);
            final Response response = client.performRequest(request);
            final int statusCode = response.getStatusLine().getStatusCode();

            // checking the content is the job of whoever called this function by checking the tuple's response

@ -385,8 +388,12 @@ public abstract class PublishableHttpResource extends HttpResource {

        boolean success = false;

        final Request request = new Request("PUT", resourceBasePath + "/" + resourceName);
        addParameters(request);
        request.setEntity(body.get());

        try {
            final Response response = client.performRequest("PUT", resourceBasePath + "/" + resourceName, parameters, body.get());
            final Response response = client.performRequest(request);
            final int statusCode = response.getStatusLine().getStatusCode();

            // 200 or 201
@ -431,12 +438,15 @@ public abstract class PublishableHttpResource extends HttpResource {

        boolean success = false;

        Request request = new Request("DELETE", resourceBasePath + "/" + resourceName);
        addParameters(request);
        if (false == parameters.containsKey("ignore")) {
            // avoid 404 being an exception by default
            final Map<String, String> deleteParameters = new HashMap<>(parameters);
            deleteParameters.putIfAbsent("ignore", Integer.toString(RestStatus.NOT_FOUND.getStatus()));
            request.addParameter("ignore", Integer.toString(RestStatus.NOT_FOUND.getStatus()));
        }

        try {
            final Response response = client.performRequest("DELETE", resourceBasePath + "/" + resourceName, deleteParameters);
            final Response response = client.performRequest(request);
            final int statusCode = response.getStatusLine().getStatusCode();

            // 200 or 404 (not found is just as good as deleting it!)

@ -498,4 +508,9 @@ public abstract class PublishableHttpResource extends HttpResource {
        return true;
    }

    private void addParameters(Request request) {
        for (Map.Entry<String, String> param : parameters.entrySet()) {
            request.addParameter(param.getKey(), param.getValue());
        }
    }
}
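
The "ignore" parameter added in the check and delete paths above is a client-side convention of the low-level REST client: listed status codes come back as ordinary Responses instead of being thrown as ResponseException. A hedged sketch (endpoint illustrative):

    Request check = new Request("GET", "/_template/some-template");
    check.addParameter("ignore", "404");                       // a 404 no longer throws
    Response response = client.performRequest(check);
    boolean exists = response.getStatusLine().getStatusCode() == 200;
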
@ -9,6 +9,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.logging.Loggers;

@ -16,7 +17,6 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;


@ -27,11 +27,6 @@ public class VersionHttpResource extends HttpResource {

    private static final Logger logger = Loggers.getLogger(VersionHttpResource.class);

    /**
     * The parameters to pass with every version request to limit the output to just the version number.
     */
    public static final Map<String, String> PARAMETERS = Collections.singletonMap("filter_path", "version.number");

    /**
     * The minimum supported version of Elasticsearch.
     */

@ -59,7 +54,9 @@ public class VersionHttpResource extends HttpResource {
        logger.trace("checking [{}] to ensure that it supports the minimum version [{}]", resourceOwnerName, minimumVersion);

        try {
            return validateVersion(client.performRequest("GET", "/", PARAMETERS));
            Request request = new Request("GET", "/");
            request.addParameter("filter_path", "version.number");
            return validateVersion(client.performRequest(request));
        } catch (IOException | RuntimeException e) {
            logger.error(
                (Supplier<?>)() ->

@ -11,6 +11,7 @@ import org.apache.http.RequestLine;
import org.apache.http.StatusLine;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

@ -20,6 +21,8 @@ import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;

import java.io.IOException;
import java.util.Map;

@ -30,10 +33,10 @@ import java.util.stream.Collectors;
import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.GET_DOES_NOT_EXIST;
import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.GET_EXISTS;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;

/**
 * Base test helper for any {@link PublishableHttpResource}.

@ -87,7 +90,9 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
        final ResponseException responseException = responseException("GET", endpoint, failedCheckStatus());
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);

        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenThrow(e);
        Request request = new Request("GET", endpoint);
        addParameters(request, getParameters(resource.getParameters()));
        when(client.performRequest(request)).thenThrow(e);

        assertThat(resource.doCheck(client), is(CheckResponse.ERROR));
    }

@ -123,7 +128,9 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
        final ResponseException responseException = responseException("DELETE", endpoint, failedCheckStatus());
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);

        when(client.performRequest("DELETE", endpoint, deleteParameters(resource.getParameters()))).thenThrow(e);
        Request request = new Request("DELETE", endpoint);
        addParameters(request, deleteParameters(resource.getParameters()));
        when(client.performRequest(request)).thenThrow(e);

        assertThat(resource.doCheck(client), is(CheckResponse.ERROR));
    }

@ -173,9 +180,15 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"));

        when(client.performRequest(eq("PUT"), eq(endpoint), eq(resource.getParameters()), any(bodyType))).thenThrow(e);
        when(client.performRequest(Mockito.any(Request.class))).thenThrow(e);

        assertThat(resource.doPublish(client), is(false));
        ArgumentCaptor<Request> request = ArgumentCaptor.forClass(Request.class);
        verify(client).performRequest(request.capture());
        assertThat(request.getValue().getMethod(), is("PUT"));
        assertThat(request.getValue().getEndpoint(), is(endpoint));
        assertThat(request.getValue().getParameters(), is(resource.getParameters()));
        assertThat(request.getValue().getEntity(), instanceOf(bodyType));
    }

    protected void assertParameters(final PublishableHttpResource resource) {

@ -244,7 +257,9 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
                                          final String endpoint, final CheckResponse expected,
                                          final Response response)
            throws IOException {
        when(client.performRequest("GET", endpoint, expectedParameters)).thenReturn(response);
        Request request = new Request("GET", endpoint);
        addParameters(request, expectedParameters);
        when(client.performRequest(request)).thenReturn(response);

        assertThat(resource.doCheck(client), is(expected));
    }

@ -257,9 +272,14 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
        final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
        final Response response = response("GET", endpoint, status);

        when(client.performRequest(eq("PUT"), eq(endpoint), eq(resource.getParameters()), any(bodyType))).thenReturn(response);
        ArgumentCaptor<Request> request = ArgumentCaptor.forClass(Request.class);
        when(client.performRequest(request.capture())).thenReturn(response);

        assertThat(resource.doPublish(client), is(expected));
        assertThat(request.getValue().getMethod(), is("PUT"));
        assertThat(request.getValue().getEndpoint(), is(endpoint));
        assertThat(request.getValue().getParameters(), is(resource.getParameters()));
        assertThat(request.getValue().getEntity(), instanceOf(bodyType));
    }

    protected void doCheckAsDeleteWithStatusCode(final PublishableHttpResource resource,

@ -277,7 +297,9 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
                                                final String endpoint, final CheckResponse expected,
                                                final Response response)
            throws IOException {
        when(client.performRequest("DELETE", endpoint, deleteParameters(resource.getParameters()))).thenReturn(response);
        Request request = new Request("DELETE", endpoint);
        addParameters(request, deleteParameters(resource.getParameters()));
        when(client.performRequest(request)).thenReturn(response);

        assertThat(resource.doCheck(client), is(expected));
    }

@ -427,4 +449,9 @@ public abstract class AbstractPublishableHttpResourceTestCase extends ESTestCase
        return entity;
    }

    protected void addParameters(Request request, Map<String, String> parameters) {
        for (Map.Entry<String, String> param : parameters.entrySet()) {
            request.addParameter(param.getKey(), param.getValue());
        }
    }
}
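
Stubbing with an equal Request covers calls that are fully determined by method, endpoint and parameters; where the entity matters, the tests instead capture the Request that was actually built and assert on its fields. A minimal Mockito sketch of the captor pattern used above:

    ArgumentCaptor<Request> captor = ArgumentCaptor.forClass(Request.class);
    when(client.performRequest(captor.capture())).thenReturn(response);

    resource.doPublish(client);

    Request sent = captor.getValue();
    assertThat(sent.getMethod(), is("PUT"));
    assertThat(sent.getEndpoint(), is("/_template/example"));  // endpoint illustrative
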
@ -10,6 +10,7 @@ import org.apache.http.StatusLine;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

@ -23,6 +24,9 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.exporter.ClusterAlertsUtil;
import org.elasticsearch.xpack.monitoring.exporter.Exporter;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;

import java.io.IOException;

@ -37,10 +41,9 @@ import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplat
import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse.DOES_NOT_EXIST;
import static org.elasticsearch.xpack.monitoring.exporter.http.PublishableHttpResource.CheckResponse.EXISTS;
import static org.hamcrest.Matchers.hasSize;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMapOf;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.startsWith;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

@ -101,7 +104,8 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        final HttpEntity entity = new StringEntity("{\"version\":{\"number\":\"unknown\"}}", ContentType.APPLICATION_JSON);

        when(versionResponse.getEntity()).thenReturn(entity);
        when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse);
        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/")))))
            .thenReturn(versionResponse);

        assertTrue(resources.isDirty());
        assertFalse(resources.checkAndPublish(client));

@ -140,7 +144,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        final List<Response> otherResponses = getTemplateResponses(1, successful, unsuccessful);

        // last check fails implies that N - 2 publishes succeeded!
        when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
        when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
            .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()]))
            .thenThrow(exception);
        whenSuccessfulPutTemplates(otherResponses.size() + 1);

@ -148,7 +152,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
            expectedGets += 1 + successful + unsuccessful;
            expectedPuts = (successfulFirst ? 0 : 1) + unsuccessful;
        } else {
            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
                .thenThrow(exception);
        }


@ -185,7 +189,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        whenGetTemplates(successful, unsuccessful + 2);

        // previous publishes must have succeeded
        when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
        when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
            .thenReturn(firstSuccess, otherResponses.toArray(new Response[otherResponses.size()]))
            .thenThrow(exception);


@ -197,7 +201,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        // fail the check so that it has to attempt the PUT
        whenGetTemplates(0, 1);

        when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
        when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
            .thenThrow(exception);
    }


@ -238,7 +242,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        }

        // last check fails
        when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
        when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
            .thenReturn(first)
            .thenThrow(exception);
        if (successfulFirst == false) {

@ -248,7 +252,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
            expectedGets = EXPECTED_PIPELINES;
            expectedPuts = successfulFirst ? 0 : 1;
        } else {
            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                .thenThrow(exception);
        }


@ -285,10 +289,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        whenGetPipelines(0, 2);

        // previous publishes must have succeeded
        when(client.performRequest(eq("PUT"),
            startsWith("/_ingest/pipeline/"),
            anyMapOf(String.class, String.class),
            any(HttpEntity.class)))
        when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
            .thenReturn(firstSuccess)
            .thenThrow(exception);


@ -300,10 +301,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        // fail the check so that it has to attempt the PUT
        whenGetPipelines(0, 1);

        when(client.performRequest(eq("PUT"),
            startsWith("/_ingest/pipeline/"),
            anyMapOf(String.class, String.class),
            any(HttpEntity.class)))
        when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
            .thenThrow(exception);
    }


@ -334,7 +332,8 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
        whenSuccessfulPutPipelines(unsuccessfulGetPipelines);

        // there's only one check
        when(client.performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class))).thenThrow(exception);
        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack")))))
            .thenThrow(exception);

        assertTrue(resources.isDirty());
        assertFalse(resources.checkAndPublish(client));

@@ -382,7 +381,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         final List<Response> otherResponses = getWatcherResponses(1, successful, unsuccessful);

         // last check fails implies that N - 2 publishes succeeded!
-        when(client.performRequest(eq("GET"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/")))))
             .thenReturn(first, otherResponses.toArray(new Response[otherResponses.size()]))
             .thenThrow(exception);
         whenSuccessfulPutWatches(otherResponses.size() + 1);
@@ -398,7 +397,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
             // there is no form of an unsuccessful delete; only success or error
             final List<Response> responses = successfulDeleteResponses(successful);

-            when(client.performRequest(eq("DELETE"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(responses.get(0), responses.subList(1, successful).toArray(new Response[successful - 1]))
                 .thenThrow(exception);
@@ -407,7 +406,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         } else {
             final String method = validLicense ? "GET" : "DELETE";

-            when(client.performRequest(eq(method), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is(method), startsWith("/_xpack/watcher/watch/")))))
                 .thenThrow(exception);
         }
@@ -463,10 +462,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         whenGetWatches(successful, unsuccessful + 2);

         // previous publishes must have succeeded
-        when(client.performRequest(eq("PUT"),
-            startsWith("/_xpack/watcher/watch/"),
-            anyMapOf(String.class, String.class),
-            any(HttpEntity.class)))
+        when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
             .thenReturn(firstSuccess, otherResponses.toArray(new Response[otherResponses.size()]))
             .thenThrow(exception);
@@ -478,10 +474,7 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         // fail the check so that it has to attempt the PUT
         whenGetWatches(0, 1);

-        when(client.performRequest(eq("PUT"),
-            startsWith("/_xpack/watcher/watch/"),
-            anyMapOf(String.class, String.class),
-            any(HttpEntity.class)))
+        when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
             .thenThrow(exception);
     }
@@ -715,17 +708,18 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         final HttpEntity entity = new StringEntity("{\"version\":{\"number\":\"" + Version.CURRENT + "\"}}", ContentType.APPLICATION_JSON);

         when(versionResponse.getEntity()).thenReturn(entity);
-        when(client.performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class))).thenReturn(versionResponse);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/")))))
+            .thenReturn(versionResponse);
     }

     private void whenGetTemplates(final int successful, final int unsuccessful) throws IOException {
         final List<Response> gets = getTemplateResponses(0, successful, unsuccessful);

         if (gets.size() == 1) {
-            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
                 .thenReturn(gets.get(0));
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/")))))
                 .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1]));
         }
     }
@@ -735,10 +729,10 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                 .thenReturn(successfulPuts.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("PUT"), startsWith("/_template/"), anyMapOf(String.class, String.class), any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                 .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }
@@ -747,10 +741,10 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         final List<Response> gets = getPipelineResponses(0, successful, unsuccessful);

         if (gets.size() == 1) {
-            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(gets.get(0));
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1]));
         }
     }
@@ -760,16 +754,10 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("PUT"),
-                startsWith("/_ingest/pipeline/"),
-                anyMapOf(String.class, String.class),
-                any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(successfulPuts.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("PUT"),
-                startsWith("/_ingest/pipeline/"),
-                anyMapOf(String.class, String.class),
-                any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/")))))
                 .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }
@@ -787,7 +775,8 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         final Response successfulGet = response("GET", "_xpack", successfulCheckStatus(), entity);

         // empty is possible if they all exist
-        when(client.performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class))).thenReturn(successfulGet);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack")))))
+            .thenReturn(successfulGet);
     }

     private void whenWatcherCannotBeUsed() throws IOException {
@@ -805,17 +794,18 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         }

         // empty is possible if they all exist
-        when(client.performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class))).thenReturn(response);
+        when(client.performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack")))))
+            .thenReturn(response);
     }

     private void whenGetWatches(final int successful, final int unsuccessful) throws IOException {
         final List<Response> gets = getWatcherResponses(0, successful, unsuccessful);

         if (gets.size() == 1) {
-            when(client.performRequest(eq("GET"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(gets.get(0));
         } else {
-            when(client.performRequest(eq("GET"), startsWith("/_xpack/watcher/watch/"), anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(gets.get(0), gets.subList(1, gets.size()).toArray(new Response[gets.size() - 1]));
         }
     }
@@ -825,16 +815,10 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("PUT"),
-                startsWith("/_xpack/watcher/watch/"),
-                anyMapOf(String.class, String.class),
-                any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulPuts.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("PUT"),
-                startsWith("/_xpack/watcher/watch/"),
-                anyMapOf(String.class, String.class),
-                any(HttpEntity.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulPuts.get(0), successfulPuts.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }
@@ -844,64 +828,55 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe

         // empty is possible if they all exist
         if (successful == 1) {
-            when(client.performRequest(eq("DELETE"),
-                startsWith("/_xpack/watcher/watch/"),
-                anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulDeletes.get(0));
         } else if (successful > 1) {
-            when(client.performRequest(eq("DELETE"),
-                startsWith("/_xpack/watcher/watch/"),
-                anyMapOf(String.class, String.class)))
+            when(client.performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/")))))
                 .thenReturn(successfulDeletes.get(0), successfulDeletes.subList(1, successful).toArray(new Response[successful - 1]));
         }
     }

     private void verifyVersionCheck() throws IOException {
-        verify(client).performRequest(eq("GET"), eq("/"), anyMapOf(String.class, String.class));
+        verify(client).performRequest(argThat(new RequestMatcher(is("GET"), is("/"))));
     }

     private void verifyGetTemplates(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("GET"), startsWith("/_template/"), anyMapOf(String.class, String.class));
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_template/"))));
     }

     private void verifyPutTemplates(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("PUT"), // method
-            startsWith("/_template/"), // endpoint
-            anyMapOf(String.class, String.class), // parameters (e.g., timeout)
-            any(HttpEntity.class)); // raw template
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/"))));
     }

     private void verifyGetPipelines(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("GET"), startsWith("/_ingest/pipeline/"), anyMapOf(String.class, String.class));
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_ingest/pipeline/"))));
     }

     private void verifyPutPipelines(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("PUT"), // method
-            startsWith("/_ingest/pipeline/"), // endpoint
-            anyMapOf(String.class, String.class), // parameters (e.g., timeout)
-            any(HttpEntity.class)); // raw template
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_ingest/pipeline/"))));
     }

     private void verifyWatcherCheck() throws IOException {
-        verify(client).performRequest(eq("GET"), eq("/_xpack"), anyMapOf(String.class, String.class));
+        verify(client).performRequest(argThat(new RequestMatcher(is("GET"), is("/_xpack"))));
     }

     private void verifyDeleteWatches(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("DELETE"), // method
-            startsWith("/_xpack/watcher/watch/"), // endpoint
-            anyMapOf(String.class, String.class));// parameters (e.g., timeout)
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("DELETE"), startsWith("/_xpack/watcher/watch/"))));
     }

     private void verifyGetWatches(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("GET"),
-            startsWith("/_xpack/watcher/watch/"),
-            anyMapOf(String.class, String.class));
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("GET"), startsWith("/_xpack/watcher/watch/"))));
     }

     private void verifyPutWatches(final int called) throws IOException {
-        verify(client, times(called)).performRequest(eq("PUT"), // method
-            startsWith("/_xpack/watcher/watch/"), // endpoint
-            anyMapOf(String.class, String.class), // parameters (e.g., timeout)
-            any(HttpEntity.class)); // raw template
+        verify(client, times(called))
+            .performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_xpack/watcher/watch/"))));
     }

     private ClusterService mockClusterService(final ClusterState state) {
@@ -922,4 +897,24 @@ public class HttpExporterResourceTests extends AbstractPublishableHttpResourceTe
         return state;
     }

+    private static class RequestMatcher extends TypeSafeMatcher<Request> {
+        private final Matcher<String> method;
+        private final Matcher<String> endpoint;
+
+        private RequestMatcher(Matcher<String> method, Matcher<String> endpoint) {
+            this.method = method;
+            this.endpoint = endpoint;
+        }
+
+        @Override
+        protected boolean matchesSafely(Request item) {
+            return method.matches(item.getMethod()) && endpoint.matches(item.getEndpoint());
+        }
+
+        @Override
+        public void describeTo(Description description) {
+            description.appendText("method is ").appendDescriptionOf(method);
+            description.appendText(" and endpoint is ").appendDescriptionOf(endpoint);
+        }
+    }
 }
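Editor's note: the `RequestMatcher` added above is what lets the earlier multi-argument stubs collapse into single `argThat(...)` calls: one Hamcrest matcher now covers method and endpoint together, regardless of parameters or body. A self-contained sketch of the same pattern, assuming Mockito 2's `MockitoHamcrest.argThat` bridge (Mockito 1.x exposed the equivalent directly); the `Client` and `Request` types here are stand-ins, not the real Elasticsearch classes:

    import org.hamcrest.Description;
    import org.hamcrest.Matcher;
    import org.hamcrest.TypeSafeMatcher;

    import static org.hamcrest.Matchers.is;
    import static org.hamcrest.Matchers.startsWith;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;
    import static org.mockito.hamcrest.MockitoHamcrest.argThat;

    public class RequestMatcherSketch {
        // Stand-in for org.elasticsearch.client.Request: just a method and an endpoint.
        static final class Request {
            final String method;
            final String endpoint;
            Request(String method, String endpoint) { this.method = method; this.endpoint = endpoint; }
            String getMethod() { return method; }
            String getEndpoint() { return endpoint; }
        }

        interface Client {
            String performRequest(Request request);
        }

        // Same shape as the matcher in the diff: one Hamcrest matcher per property,
        // combined in matchesSafely(); describeTo() keeps verification failures readable.
        static final class RequestMatcher extends TypeSafeMatcher<Request> {
            private final Matcher<String> method;
            private final Matcher<String> endpoint;

            RequestMatcher(Matcher<String> method, Matcher<String> endpoint) {
                this.method = method;
                this.endpoint = endpoint;
            }

            @Override
            protected boolean matchesSafely(Request item) {
                return method.matches(item.getMethod()) && endpoint.matches(item.getEndpoint());
            }

            @Override
            public void describeTo(Description description) {
                description.appendText("method is ").appendDescriptionOf(method)
                           .appendText(" and endpoint is ").appendDescriptionOf(endpoint);
            }
        }

        public static void main(String[] args) {
            Client client = mock(Client.class);
            // One stub covers every PUT under /_template/, whatever the parameters or body.
            when(client.performRequest(argThat(new RequestMatcher(is("PUT"), startsWith("/_template/")))))
                    .thenReturn("stubbed");
            System.out.println(client.performRequest(new Request("PUT", "/_template/monitoring")));
        }
    }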
@@ -10,6 +10,7 @@ import org.apache.http.HttpEntity;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
@@ -61,13 +62,15 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final RestStatus failedStatus = failedCheckStatus();
         final Response response = response("GET", endpoint, failedStatus);
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, CheckResponse.ERROR, response);

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

         verifyNoMoreInteractions(client, logger);
@@ -95,8 +98,10 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final Response response = response("GET", endpoint, failedStatus);
         final XContent xContent = mock(XContent.class);
         final int minimumVersion = randomInt();
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.versionCheckForResource(client, logger,
             resourceBasePath, resourceName, resourceType, owner, ownerType,
@@ -104,7 +109,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
             is(CheckResponse.ERROR));

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

         verifyNoMoreInteractions(client, logger);
@@ -117,8 +122,10 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final HttpEntity entity = entityForResource(CheckResponse.ERROR, resourceName, minimumVersion);
         final Response response = response("GET", endpoint, okStatus, entity);
         final XContent xContent = mock(XContent.class);
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.versionCheckForResource(client, logger,
             resourceBasePath, resourceName, resourceType, owner, ownerType,
@@ -127,7 +134,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
         verify(logger).debug("{} [{}] found on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), any(ResponseException.class));

         verifyNoMoreInteractions(client, logger);
@@ -140,12 +147,14 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
         final Response response = e == responseException ? responseException.getResponse() : null;

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenThrow(e);
+        Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));
+        when(client.performRequest(request)).thenThrow(e);

         sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, CheckResponse.ERROR, response);

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

         verifyNoMoreInteractions(client, logger);
@@ -162,13 +171,16 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
     public void testPutResourceFalseWithException() throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"));
+        final Request request = new Request("PUT", endpoint);
+        addParameters(request, resource.getParameters());
+        request.setEntity(entity);

-        when(client.performRequest("PUT", endpoint, resource.getParameters(), entity)).thenThrow(e);
+        when(client.performRequest(request)).thenThrow(e);

         assertThat(resource.putResource(client, logger, resourceBasePath, resourceName, body, resourceType, owner, ownerType), is(false));

         verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("PUT", endpoint, resource.getParameters(), entity);
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

         verifyNoMoreInteractions(client, logger);
@@ -190,13 +202,15 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final ResponseException responseException = responseException("DELETE", endpoint, failedStatus);
         final Exception e = randomFrom(new IOException("expected"), new RuntimeException("expected"), responseException);
         final Map<String, String> deleteParameters = deleteParameters(resource.getParameters());
+        final Request request = new Request("DELETE", endpoint);
+        addParameters(request, deleteParameters);

-        when(client.performRequest("DELETE", endpoint, deleteParameters)).thenThrow(e);
+        when(client.performRequest(request)).thenThrow(e);

         assertThat(resource.deleteResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), is(false));

         verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("DELETE", endpoint, deleteParameters);
+        verify(client).performRequest(request);
         verify(logger).error(any(org.apache.logging.log4j.util.Supplier.class), eq(e));

         verifyNoMoreInteractions(client, logger);
@@ -277,13 +291,15 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
             throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("GET", endpoint, status);
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         sometimesAssertSimpleCheckForResource(client, logger, resourceBasePath, resourceName, resourceType, expected, response);

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);

         if (expected == CheckResponse.EXISTS || expected == CheckResponse.DOES_NOT_EXIST) {
             verify(response).getStatusLine();
@@ -310,8 +326,10 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final HttpEntity entity = status == RestStatus.OK ? entityForResource(expected, resourceName, minimumVersion) : null;
         final Response response = response("GET", endpoint, status, entity);
         final XContent xContent = XContentType.JSON.xContent();
+        final Request request = new Request("GET", endpoint);
+        addParameters(request, getParameters(resource.getParameters()));

-        when(client.performRequest("GET", endpoint, getParameters(resource.getParameters()))).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.versionCheckForResource(client, logger,
             resourceBasePath, resourceName, resourceType, owner, ownerType,
@@ -319,7 +337,7 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
             is(expected));

         verify(logger).trace("checking if {} [{}] exists on the [{}] {}", resourceType, resourceName, owner, ownerType);
-        verify(client).performRequest("GET", endpoint, getParameters(resource.getParameters()));
+        verify(client).performRequest(request);

         if (shouldReplace || expected == CheckResponse.EXISTS) {
             verify(response).getStatusLine();
@@ -341,13 +359,16 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
     private void assertPutResource(final RestStatus status, final boolean expected) throws IOException {
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("PUT", endpoint, status);
+        final Request request = new Request("PUT", endpoint);
+        addParameters(request, resource.getParameters());
+        request.setEntity(entity);

-        when(client.performRequest("PUT", endpoint, resource.getParameters(), entity)).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.putResource(client, logger, resourceBasePath, resourceName, body, resourceType, owner, ownerType),
             is(expected));

-        verify(client).performRequest("PUT", endpoint, resource.getParameters(), entity);
+        verify(client).performRequest(request);
         verify(response).getStatusLine();

         verify(logger).trace("uploading {} [{}] to the [{}] {}", resourceType, resourceName, owner, ownerType);
@@ -388,12 +409,14 @@ public class PublishableHttpResourceTests extends AbstractPublishableHttpResourc
         final String endpoint = concatenateEndpoint(resourceBasePath, resourceName);
         final Response response = response("DELETE", endpoint, status);
         final Map<String, String> deleteParameters = deleteParameters(resource.getParameters());
+        final Request request = new Request("DELETE", endpoint);
+        addParameters(request, deleteParameters);

-        when(client.performRequest("DELETE", endpoint, deleteParameters)).thenReturn(response);
+        when(client.performRequest(request)).thenReturn(response);

         assertThat(resource.deleteResource(client, logger, resourceBasePath, resourceName, resourceType, owner, ownerType), is(expected));

-        verify(client).performRequest("DELETE", endpoint, deleteParameters);
+        verify(client).performRequest(request);
         verify(response).getStatusLine();

         verify(logger).trace("deleting {} [{}] from the [{}] {}", resourceType, resourceName, owner, ownerType);
@@ -6,8 +6,9 @@
 package org.elasticsearch.xpack.monitoring.exporter.http;

 import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
+import org.apache.http.nio.entity.NStringEntity;
 import org.elasticsearch.Version;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.test.ESTestCase;
@@ -73,8 +74,9 @@ public class VersionHttpResourceTests extends ESTestCase {
     }

     public void testDoCheckAndPublishFailedWithIOException() throws IOException {
-        // request fails for some reason
-        when(client.performRequest("GET", "/", VersionHttpResource.PARAMETERS)).thenThrow(new IOException("expected"));
+        Request request = new Request("GET", "/");
+        request.addParameter("filter_path", "version.number");
+        when(client.performRequest(request)).thenThrow(new IOException("expected"));

         final VersionHttpResource resource = new VersionHttpResource(owner, Version.CURRENT);
@@ -82,12 +84,14 @@ public class VersionHttpResourceTests extends ESTestCase {
     }

     private Response responseForJSON(final String json) throws IOException {
-        final StringEntity entity = new StringEntity(json, ContentType.APPLICATION_JSON);
+        final NStringEntity entity = new NStringEntity(json, ContentType.APPLICATION_JSON);

         final Response response = mock(Response.class);
         when(response.getEntity()).thenReturn(entity);

-        when(client.performRequest("GET", "/", VersionHttpResource.PARAMETERS)).thenReturn(response);
+        Request request = new Request("GET", "/");
+        request.addParameter("filter_path", "version.number");
+        when(client.performRequest(request)).thenReturn(response);

         return response;
     }
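Editor's note: the hunks above are all instances of the same low-level REST client migration: the multi-argument `RestClient.performRequest(method, endpoint, parameters, entity)` overloads give way to a single `Request` object carrying method, endpoint, query parameters and body. A minimal sketch of the new call shape; the endpoint, parameter and JSON body below are illustrative only, not taken from this commit:

    import java.io.IOException;

    import org.apache.http.entity.ContentType;
    import org.apache.http.nio.entity.NStringEntity;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class RequestApiSketch {
        // Builds and sends one request through the single-argument overload.
        static Response putTemplate(RestClient client) throws IOException {
            Request request = new Request("PUT", "/_template/example_template"); // method + endpoint
            request.addParameter("filter_path", "acknowledged");                 // query parameter
            request.setEntity(new NStringEntity("{\"index_patterns\":[\"example-*\"]}",
                    ContentType.APPLICATION_JSON));                              // request body
            return client.performRequest(request);                               // replaces the 4-arg overload
        }
    }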
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ssl;


 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.bootstrap.JavaVersion;
 import org.elasticsearch.common.settings.MockSecureSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.TransportAddress;

@@ -92,6 +93,7 @@ public class SSLReloadIntegTests extends SecurityIntegTestCase {
     }

     public void testThatSSLConfigurationReloadsOnModification() throws Exception {
+        assumeFalse("test fails on JDK 11 currently", JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0);
         Path keyPath = createTempDir().resolve("testnode_updated.pem");
         Path certPath = createTempDir().resolve("testnode_updated.crt");
         Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath);
@@ -4,7 +4,6 @@
       xpack.ssl.certificates: {}

   - length: { $body: 1 }
-  - match: { $body.0.path: "test-node.jks" }
-  - match: { $body.0.format: "jks" }
-  - match: { $body.0.alias: "test-node" }
+  - match: { $body.0.path: "testnode.crt" }
+  - match: { $body.0.format: "PEM" }
+  - match: { $body.0.has_private_key: true }
@@ -52,5 +52,5 @@
             "metric" : { "precision": { "ignore_unlabeled" : true }}
         }

-  - match: { quality_level: 1 }
+  - match: { metric_score: 1 }

|
|||
|
||||
String output = "${buildDir}/generated-resources/${project.name}"
|
||||
task copyTestNodeKeystore(type: Copy) {
|
||||
from project(xpackModule('core'))
|
||||
from project(':x-pack:plugin:core')
|
||||
.file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
|
||||
into outputDir
|
||||
}
|
||||
|
|
|
@@ -18,59 +18,45 @@ integTestRunner {
   systemProperty 'es.set.netty.runtime.available.processors', 'false'
 }

-// location of generated keystores and certificates
+// location for keys and certificates
 File keystoreDir = new File(project.buildDir, 'keystore')

-// Generate the node's keystore
-File nodeKeystore = new File(keystoreDir, 'test-node.jks')
-task createNodeKeyStore(type: LoggedExec) {
-  doFirst {
-    if (nodeKeystore.parentFile.exists() == false) {
-      nodeKeystore.parentFile.mkdirs()
-    }
-    if (nodeKeystore.exists()) {
-      delete nodeKeystore
-    }
-  }
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
-  args '-genkey',
-          '-alias', 'test-node',
-          '-keystore', nodeKeystore,
-          '-keyalg', 'RSA',
-          '-keysize', '2048',
-          '-validity', '712',
-          '-dname', 'CN=smoke-test-plugins-ssl',
-          '-keypass', 'keypass',
-          '-storepass', 'keypass'
-}
+File nodeKey = file("$keystoreDir/testnode.pem")
+File nodeCert = file("$keystoreDir/testnode.crt")
+// Add key and certs to test classpath: it expects it there
+task copyKeyCerts(type: Copy) {
+  from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) {
+    include 'testnode.crt', 'testnode.pem'
+  }
+  into keystoreDir
+}

-// Add keystores to test classpath: it expects it there
+// Add keys and certs to test classpath: it expects it there
 sourceSets.test.resources.srcDir(keystoreDir)
-processTestResources.dependsOn(createNodeKeyStore)
+processTestResources.dependsOn(copyKeyCerts)

 integTestCluster {
-  dependsOn createNodeKeyStore
+  dependsOn copyKeyCerts
   setting 'xpack.security.enabled', 'true'
   setting 'xpack.ml.enabled', 'true'
   setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE'
   setting 'xpack.monitoring.enabled', 'false'
   setting 'xpack.security.authc.token.enabled', 'true'
   setting 'xpack.security.transport.ssl.enabled', 'true'
-  setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name
+  setting 'xpack.security.transport.ssl.key', nodeKey.name
+  setting 'xpack.security.transport.ssl.certificate', nodeCert.name
   setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
   setting 'xpack.security.audit.enabled', 'true'
   setting 'xpack.license.self_generated.type', 'trial'

   keystoreSetting 'bootstrap.password', 'x-pack-test-password'
-  keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
+  keystoreSetting 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode'

   numNodes = 3

   setupCommand 'setupDummyUser',
          'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser'

-  extraConfigFile nodeKeystore.name, nodeKeystore
+  extraConfigFile nodeKey.name, nodeKey
+  extraConfigFile nodeCert.name, nodeCert

   waitCondition = { node, ant ->
     File tmpFile = new File(node.cwd, 'wait.success')
@@ -124,9 +124,11 @@ abstract class MlNativeAutodetectIntegTestCase extends ESIntegTestCase {

     @Override
     protected Settings externalClusterClientSettings() {
-        Path keyStore;
+        Path key;
+        Path certificate;
         try {
-            keyStore = PathUtils.get(getClass().getResource("/test-node.jks").toURI());
+            key = PathUtils.get(getClass().getResource("/testnode.pem").toURI());
+            certificate = PathUtils.get(getClass().getResource("/testnode.crt").toURI());
        } catch (URISyntaxException e) {
            throw new IllegalStateException("error trying to get keystore path", e);
        }

@@ -135,8 +137,9 @@ abstract class MlNativeAutodetectIntegTestCase extends ESIntegTestCase {
         builder.put(SecurityField.USER_SETTING.getKey(), "x_pack_rest_user:" + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING);
         builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), true);
         builder.put("xpack.security.transport.ssl.enabled", true);
-        builder.put("xpack.security.transport.ssl.keystore.path", keyStore.toAbsolutePath().toString());
-        builder.put("xpack.security.transport.ssl.keystore.password", "keypass");
+        builder.put("xpack.security.transport.ssl.key", key.toAbsolutePath().toString());
+        builder.put("xpack.security.transport.ssl.certificate", certificate.toAbsolutePath().toString());
+        builder.put("xpack.security.transport.ssl.key_passphrase", "testnode");
         builder.put("xpack.security.transport.ssl.verification_mode", "certificate");
         return builder.build();
     }
@@ -107,7 +107,7 @@ subprojects {

  String output = "${buildDir}/generated-resources/${project.name}"
  task copyTestNodeKeystore(type: Copy) {
-    from project(xpackModule('core'))
+    from project(':x-pack:plugin:core')
        .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks')
    into outputDir
  }
@@ -4,7 +4,7 @@ import org.elasticsearch.gradle.plugin.PluginBuildPlugin
 import org.elasticsearch.gradle.test.NodeInfo

 import javax.net.ssl.HttpsURLConnection
-import javax.net.ssl.KeyManagerFactory
+import javax.net.ssl.KeyManager
 import javax.net.ssl.SSLContext
 import javax.net.ssl.TrustManagerFactory
 import java.nio.charset.StandardCharsets
@@ -26,135 +26,27 @@ task copyXPackPluginProps(type: Copy) {
 }
 project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps)

-// needed to be consistent with ssl host checking
-Object san = new SanEvaluator()
-
 // location of generated keystores and certificates
 File keystoreDir = new File(project.buildDir, 'keystore')

-// Generate the node's keystore
-File nodeKeystore = new File(keystoreDir, 'test-node.jks')
-task createNodeKeyStore(type: LoggedExec) {
-  doFirst {
-    if (nodeKeystore.parentFile.exists() == false) {
-      nodeKeystore.parentFile.mkdirs()
-    }
-    if (nodeKeystore.exists()) {
-      delete nodeKeystore
-    }
-  }
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
-  args '-genkey',
-          '-alias', 'test-node',
-          '-keystore', nodeKeystore,
-          '-keyalg', 'RSA',
-          '-keysize', '2048',
-          '-validity', '712',
-          '-dname', 'CN=smoke-test-plugins-ssl',
-          '-keypass', 'keypass',
-          '-storepass', 'keypass',
-          '-ext', san
-}
-
-// Generate the client's keystore
-File clientKeyStore = new File(keystoreDir, 'test-client.jks')
-task createClientKeyStore(type: LoggedExec) {
-  doFirst {
-    if (clientKeyStore.parentFile.exists() == false) {
-      clientKeyStore.parentFile.mkdirs()
-    }
-    if (clientKeyStore.exists()) {
-      delete clientKeyStore
-    }
-  }
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
-  args '-genkey',
-          '-alias', 'test-client',
-          '-keystore', clientKeyStore,
-          '-keyalg', 'RSA',
-          '-keysize', '2048',
-          '-validity', '712',
-          '-dname', 'CN=smoke-test-plugins-ssl',
-          '-keypass', 'keypass',
-          '-storepass', 'keypass',
-          '-ext', san
-}
-
-// Export the node's certificate
-File nodeCertificate = new File(keystoreDir, 'test-node.cert')
-task exportNodeCertificate(type: LoggedExec) {
-  dependsOn createNodeKeyStore
-  doFirst {
-    if (nodeCertificate.parentFile.exists() == false) {
-      nodeCertificate.parentFile.mkdirs()
-    }
-    if (nodeCertificate.exists()) {
-      delete nodeCertificate
-    }
-  }
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  args '-export',
-          '-alias', 'test-node',
-          '-keystore', nodeKeystore,
-          '-storepass', 'keypass',
-          '-file', nodeCertificate
-}
-
-// Import the node certificate in the client's keystore
-task importNodeCertificateInClientKeyStore(type: LoggedExec) {
-  dependsOn createClientKeyStore, exportNodeCertificate
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  args '-import',
-          '-alias', 'test-node',
-          '-keystore', clientKeyStore,
-          '-storepass', 'keypass',
-          '-file', nodeCertificate,
-          '-noprompt'
-}
-
-// Export the client's certificate
-File clientCertificate = new File(keystoreDir, 'test-client.cert')
-task exportClientCertificate(type: LoggedExec) {
-  dependsOn createClientKeyStore
-  doFirst {
-    if (clientCertificate.parentFile.exists() == false) {
-      clientCertificate.parentFile.mkdirs()
-    }
-    if (clientCertificate.exists()) {
-      delete clientCertificate
-    }
-  }
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  args '-export',
-          '-alias', 'test-client',
-          '-keystore', clientKeyStore,
-          '-storepass', 'keypass',
-          '-file', clientCertificate
-}
-
-// Import the client certificate in the node's keystore
-task importClientCertificateInNodeKeyStore(type: LoggedExec) {
-  dependsOn createNodeKeyStore, exportClientCertificate
-  executable = new File(project.runtimeJavaHome, 'bin/keytool')
-  args '-import',
-          '-alias', 'test-client',
-          '-keystore', nodeKeystore,
-          '-storepass', 'keypass',
-          '-file', clientCertificate,
-          '-noprompt'
-}
-
-forbiddenPatterns {
-  exclude '**/*.cert'
-}
+File nodeKeystore = file("$keystoreDir/testnode.jks")
+File nodeKey = file("$keystoreDir/testnode.pem")
+File nodeCert = file("$keystoreDir/testnode.crt")
+File clientKeyStore = file("$keystoreDir/testclient.jks")
+File clientKey = file("$keystoreDir/testclient.pem")
+File clientCert = file("$keystoreDir/testclient.crt")
+
+// Add keystores to test classpath: it expects it there
+task copyKeyCerts(type: Copy) {
+  from('./') {
+    include '*.crt', '*.pem', '*.jks'
+  }
+  into keystoreDir
+}

 // Add keystores to test classpath: it expects it there
 sourceSets.test.resources.srcDir(keystoreDir)
-processTestResources.dependsOn(importNodeCertificateInClientKeyStore, importClientCertificateInNodeKeyStore)
+processTestResources.dependsOn(copyKeyCerts)

-integTestCluster.dependsOn(importClientCertificateInNodeKeyStore, importNodeCertificateInClientKeyStore)
+integTestCluster.dependsOn(copyKeyCerts)

 ext.pluginsCount = 0
 project(':plugins').getChildProjects().each { pluginName, pluginProject ->
@@ -167,8 +59,7 @@ integTestCluster {
   setting 'xpack.monitoring.collection.interval', '1s'
   setting 'xpack.monitoring.exporters._http.type', 'http'
   setting 'xpack.monitoring.exporters._http.enabled', 'false'
-  setting 'xpack.monitoring.exporters._http.ssl.truststore.path', clientKeyStore.name
-  setting 'xpack.monitoring.exporters._http.ssl.truststore.password', 'keypass'
+  setting 'xpack.ssl.certificate_authorities', 'testnode.crt'
   setting 'xpack.monitoring.exporters._http.auth.username', 'monitoring_agent'
   setting 'xpack.monitoring.exporters._http.auth.password', 'x-pack-test-password'
   setting 'xpack.monitoring.exporters._http.ssl.verification_mode', 'full'
@@ -176,15 +67,19 @@ integTestCluster {
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   setting 'xpack.security.http.ssl.enabled', 'true'
-  setting 'xpack.security.http.ssl.keystore.path', nodeKeystore.name
-  keystoreSetting 'xpack.security.http.ssl.keystore.secure_password', 'keypass'
+  setting 'xpack.security.http.ssl.key', 'testnode.pem'
+  setting 'xpack.security.http.ssl.certificate', 'testnode.crt'
+  keystoreSetting 'xpack.security.http.ssl.secure_key_passphrase', 'testnode'

   setting 'xpack.index_lifecycle.enabled', 'false'
   setting 'xpack.ml.enabled', 'false'

-  // copy keystores into config/
+  // copy keystores, keys and certificates into config/
   extraConfigFile nodeKeystore.name, nodeKeystore
+  extraConfigFile nodeKey.name, nodeKey
+  extraConfigFile nodeCert.name, nodeCert
   extraConfigFile clientKeyStore.name, clientKeyStore
+  extraConfigFile clientKey.name, clientKey
+  extraConfigFile clientCert.name, clientCert

   setupCommand 'setupTestUser',
          'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'
@@ -194,13 +89,12 @@ integTestCluster {
   waitCondition = { NodeInfo node, AntBuilder ant ->
     File tmpFile = new File(node.cwd, 'wait.success')
     KeyStore keyStore = KeyStore.getInstance("JKS");
-    keyStore.load(clientKeyStore.newInputStream(), 'keypass'.toCharArray());
-    KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
-    kmf.init(keyStore, 'keypass'.toCharArray());
+    keyStore.load(clientKeyStore.newInputStream(), 'testclient'.toCharArray());
     TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
     tmf.init(keyStore);
+    // We don't need a KeyManager as there won't be client auth required so pass an empty array
     SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
-    sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom());
+    sslContext.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
     for (int i = 0; i < 10; i++) {
       // we use custom wait logic here for HTTPS
       HttpsURLConnection httpURLConnection = null;
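Editor's note: the wait-condition change above drops the KeyManagerFactory entirely because the cluster no longer demands client certificates: the client only needs to trust the server. A self-contained sketch of that trust-only setup using plain JDK APIs; the truststore path and password are illustrative:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.security.KeyStore;
    import java.security.SecureRandom;
    import javax.net.ssl.KeyManager;
    import javax.net.ssl.SSLContext;
    import javax.net.ssl.TrustManagerFactory;

    public class TrustOnlySslContextSketch {
        // Builds a client SSLContext that can verify the server but presents no
        // client certificate - the same shape as the waitCondition above.
        static SSLContext trustOnly(Path trustStorePath, char[] password) throws Exception {
            KeyStore trustStore = KeyStore.getInstance("JKS");
            try (InputStream in = Files.newInputStream(trustStorePath)) {
                trustStore.load(in, password);
            }
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            tmf.init(trustStore);
            SSLContext ctx = SSLContext.getInstance("TLSv1.2");
            // No KeyManagers: client authentication is not required here.
            ctx.init(new KeyManager[0], tmf.getTrustManagers(), new SecureRandom());
            return ctx;
        }

        public static void main(String[] args) throws Exception {
            SSLContext ctx = trustOnly(Paths.get("build/keystore/testclient.jks"), "testclient".toCharArray());
            System.out.println(ctx.getProtocol()); // prints TLSv1.2
        }
    }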
@@ -246,159 +140,3 @@ processTestResources {
     MavenFilteringHack.filter(it, expansions)
   }
 }
-
-/** A lazy evaluator to find the san to use for certificate generation. */
-class SanEvaluator {
-
-  private static String san = null
-
-  String toString() {
-    synchronized (SanEvaluator.class) {
-      if (san == null) {
-        san = getSubjectAlternativeNameString()
-      }
-    }
-    return san
-  }
-
-  // Code stolen from NetworkUtils/InetAddresses/NetworkAddress to support SAN
-  /** Return all interfaces (and subinterfaces) on the system */
-  private static List<NetworkInterface> getInterfaces() throws SocketException {
-    List<NetworkInterface> all = new ArrayList<>();
-    addAllInterfaces(all, Collections.list(NetworkInterface.getNetworkInterfaces()));
-    Collections.sort(all, new Comparator<NetworkInterface>() {
-      @Override
-      public int compare(NetworkInterface left, NetworkInterface right) {
-        return Integer.compare(left.getIndex(), right.getIndex());
-      }
-    });
-    return all;
-  }
-
-  /** Helper for getInterfaces, recursively adds subinterfaces to {@code target} */
-  private static void addAllInterfaces(List<NetworkInterface> target, List<NetworkInterface> level) {
-    if (!level.isEmpty()) {
-      target.addAll(level);
-      for (NetworkInterface intf : level) {
-        addAllInterfaces(target, Collections.list(intf.getSubInterfaces()));
-      }
-    }
-  }
-
-  private static String getSubjectAlternativeNameString() {
-    List<InetAddress> list = new ArrayList<>();
-    for (NetworkInterface intf : getInterfaces()) {
-      if (intf.isUp()) {
-        // NOTE: some operating systems (e.g. BSD stack) assign a link local address to the loopback interface
-        // while technically not a loopback address, some of these treat them as one (e.g. OS X "localhost") so we must too,
-        // otherwise things just won't work out of box. So we include all addresses from loopback interfaces.
-        for (InetAddress address : Collections.list(intf.getInetAddresses())) {
-          if (intf.isLoopback() || address.isLoopbackAddress()) {
-            list.add(address);
-          }
-        }
-      }
-    }
-    if (list.isEmpty()) {
-      throw new IllegalArgumentException("no up-and-running loopback addresses found, got " + getInterfaces());
-    }
-
-    StringBuilder builder = new StringBuilder("san=");
-    for (int i = 0; i < list.size(); i++) {
-      InetAddress address = list.get(i);
-      String hostAddress;
-      if (address instanceof Inet6Address) {
-        hostAddress = compressedIPV6Address((Inet6Address)address);
-      } else {
-        hostAddress = address.getHostAddress();
-      }
-      builder.append("ip:").append(hostAddress);
-      String hostname = address.getHostName();
-      if (hostname.equals(address.getHostAddress()) == false) {
-        builder.append(",dns:").append(hostname);
-      }
-
-      if (i != (list.size() - 1)) {
-        builder.append(",");
-      }
-    }
-
-    return builder.toString();
-  }
-
-  private static String compressedIPV6Address(Inet6Address inet6Address) {
-    byte[] bytes = inet6Address.getAddress();
-    int[] hextets = new int[8];
-    for (int i = 0; i < hextets.length; i++) {
-      hextets[i] = (bytes[2 * i] & 255) << 8 | bytes[2 * i + 1] & 255;
-    }
-    compressLongestRunOfZeroes(hextets);
-    return hextetsToIPv6String(hextets);
-  }
-
-  /**
-   * Identify and mark the longest run of zeroes in an IPv6 address.
-   *
-   * <p>Only runs of two or more hextets are considered. In case of a tie, the
-   * leftmost run wins. If a qualifying run is found, its hextets are replaced
-   * by the sentinel value -1.
-   *
-   * @param hextets {@code int[]} mutable array of eight 16-bit hextets
-   */
-  private static void compressLongestRunOfZeroes(int[] hextets) {
-    int bestRunStart = -1;
-    int bestRunLength = -1;
-    int runStart = -1;
-    for (int i = 0; i < hextets.length + 1; i++) {
-      if (i < hextets.length && hextets[i] == 0) {
-        if (runStart < 0) {
-          runStart = i;
-        }
-      } else if (runStart >= 0) {
-        int runLength = i - runStart;
-        if (runLength > bestRunLength) {
-          bestRunStart = runStart;
-          bestRunLength = runLength;
-        }
-        runStart = -1;
-      }
-    }
-    if (bestRunLength >= 2) {
-      Arrays.fill(hextets, bestRunStart, bestRunStart + bestRunLength, -1);
-    }
-  }
-
-  /**
-   * Convert a list of hextets into a human-readable IPv6 address.
-   *
-   * <p>In order for "::" compression to work, the input should contain negative
-   * sentinel values in place of the elided zeroes.
-   *
-   * @param hextets {@code int[]} array of eight 16-bit hextets, or -1s
-   */
-  private static String hextetsToIPv6String(int[] hextets) {
-    /*
-     * While scanning the array, handle these state transitions:
-     *   start->num => "num"     start->gap => "::"
-     *   num->num   => ":num"    num->gap   => "::"
-     *   gap->num   => "num"     gap->gap   => ""
-     */
-    StringBuilder buf = new StringBuilder(39);
-    boolean lastWasNumber = false;
-    for (int i = 0; i < hextets.length; i++) {
-      boolean thisIsNumber = hextets[i] >= 0;
-      if (thisIsNumber) {
-        if (lastWasNumber) {
-          buf.append(':');
-        }
-        buf.append(Integer.toHexString(hextets[i]));
-      } else {
-        if (i == 0 || lastWasNumber) {
-          buf.append("::");
-        }
-      }
-      lastWasNumber = thisIsNumber;
-    }
-    return buf.toString();
-  }
-}
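Editor's note: the removed `SanEvaluator` carried its own "::" shortening of IPv6 addresses (longest run of two or more zero hextets is blanked with -1 sentinels, then rendered). A tiny driver, assuming the two removed helpers are in scope exactly as written above:

    // 2001:db8:0:0:0:0:2:1 should come out as 2001:db8::2:1
    int[] hextets = {0x2001, 0x0db8, 0, 0, 0, 0, 0x2, 0x1};
    compressLongestRunOfZeroes(hextets);              // marks hextets 2..5 with -1
    System.out.println(hextetsToIPv6String(hextets)); // prints 2001:db8::2:1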
@@ -29,7 +29,7 @@ public class SmokeTestPluginsSslClientYamlTestSuiteIT extends ESClientYamlSuiteT

     private static final String USER = "test_user";
     private static final String PASS = "x-pack-test-password";
-    private static final String KEYSTORE_PASS = "keypass";
+    private static final String KEYSTORE_PASS = "testnode";

     public SmokeTestPluginsSslClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
         super(testCandidate);

@@ -45,7 +45,7 @@ public class SmokeTestPluginsSslClientYamlTestSuiteIT extends ESClientYamlSuiteT
     @BeforeClass
     public static void getKeyStore() {
         try {
-            keyStore = PathUtils.get(SmokeTestPluginsSslClientYamlTestSuiteIT.class.getResource("/test-node.jks").toURI());
+            keyStore = PathUtils.get(SmokeTestPluginsSslClientYamlTestSuiteIT.class.getResource("/testnode.jks").toURI());
        } catch (URISyntaxException e) {
            throw new ElasticsearchException("exception while reading the store", e);
        }
@@ -0,0 +1,23 @@
+-----BEGIN CERTIFICATE-----
+MIID1zCCAr+gAwIBAgIJALnUl/KSS74pMA0GCSqGSIb3DQEBCwUAMEoxDDAKBgNV
+BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEiMCAGA1UEAxMZRWxhc3Rp
+Y3NlYXJjaCBUZXN0IENsaWVudDAeFw0xNTA5MjMxODUyNTVaFw0xOTA5MjIxODUy
+NTVaMEoxDDAKBgNVBAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEiMCAG
+A1UEAxMZRWxhc3RpY3NlYXJjaCBUZXN0IENsaWVudDCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMKm+P6vDAff0c6BWKGdhnYoNl9HijLIgfU3d9CQcqKt
+wT+yUW3DPSVjIfaLmDIGj6Hl8jTHWPB7ZP4fzhrPi6m4qlRGclJMECBuNASZFiPD
+tEDv3msoeqOKQet6n7PZvgpWM7hxYZO4P1aMKJtRsFAdvBAdZUnv0spR5G4UZTHz
+SKmMeanIKFkLaD0XVKiLQu9/z9M6roDQeAEoCJ/8JsanG8ih2ymfPHIZuNyYIOrV
+ekHN2zU6bnVn8/PCeZSjS6h5xYw+Jl5gzGI/n+F5CZ+THoH8pM4pGp6xRVzpiH12
+gvERGwgSIDXdn/+uZZj+4lE7n2ENRSOt5KcOGG99r60CAwEAAaOBvzCBvDAJBgNV
+HRMEAjAAMB0GA1UdDgQWBBSSFhBXNp7AaNrHdlgCV0mCEzt7ajCBjwYDVR0RBIGH
+MIGEgglsb2NhbGhvc3SCFWxvY2FsaG9zdC5sb2NhbGRvbWFpboIKbG9jYWxob3N0
+NIIXbG9jYWxob3N0NC5sb2NhbGRvbWFpbjSCCmxvY2FsaG9zdDaCF2xvY2FsaG9z
+dDYubG9jYWxkb21haW42hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0GCSqGSIb3
+DQEBCwUAA4IBAQANvAkddfLxn4/BCY4LY/1ET3d7ZRldjFTyjjHRYJ3CYBXWVahM
+skLxIcFNca8YjKfXoX8mcK+NQK/dAbGHXqk76yMlkrKjh1OQiZ1YAX5ryYerGrZ9
+9N3E9wnbn72bW3iumoLlqmTWlHEpMI0Ql6J75BQLTgKHxCPupVA5sTbWkKwGjXXA
+i84rUlzhDJOR8jk3/7ct0iZO8Hk6AWMcNix5Wka3IDGUXuEVevYRlxgVyCxcnZWC
+7JWREpar5aIPQFkY6VCEglxwUyXbHZw5T/u6XaKKnS7gz8RiwRh68ddSQJeEHi5e
+4onUD7bOCJgfsiUwdiCkDbfN9Yum8OIpmBRs
+-----END CERTIFICATE-----
Binary file not shown.
@@ -0,0 +1,30 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: DES-EDE3-CBC,C98A45E4AFC263C2
+
+wLuUEXldYc54r4ryWd6jw6UMGYwn6+ibGKHp4sD92l42lmI2UrCT/Mb/E0O+KMMy
+pHgc5/dBWkXgMiqDyLIhHk4kgT40rdw5W5lZkAA4Qt/Yzd+rbscTvzp09zrF6Fll
+czgoE7FrvhOKiEOakerTit4pIPYosdX606cpVQE2lq9oZs9HVMcLzdAZj8A/P/4g
+fo4X3+zqVYC/LH4n00bhNoeeej2o1lEJ+l9u9hptT2ATXle6pANa83Ldg4OxJyj8
+dkR9ahnAMCvYTSjEU7nwmGNPeFX0PIUjJKQivr410cYG104DC30Yy+XrIUfjTVUi
+agwlMpHoBq79/ZRUJR3xPLkIGgw4g+RPt45D9eKsEsV4vqy8SFlgaoJ2mKUKleZy
+i7D9ouzMKQ3sYE4eQVQ5o3K8ZPn5eozCwCVIp7jGSsuvDpLA9peZSwWPfc5y8JFD
+/64usCt1J8Mv/e9NVllC8ZA+ZmDitTiwLZysczpMOaFqqeUbk9EJst38n4nBzRV2
+quxvg9W/iveQIydFyftCtNfRkpbp0NCsLz293dBYwZacHsPcY27IBCwXHiICjiAW
+q7bnisXsgSaQMhMNRGW9YElZGb7ZWxoIzcyNBisGI8zxn48ObERVOmkOFxY/gs9T
+YmpVMliWtmRG6hb6iCh9b7z8THRquxgTGE9ZFBwtLUKg33aubtgAfnUh/Xq2Ue5K
+l+ZCqDGEi/FSIjVENUNNntAx/vXeNPbkoGLb/HSJwAh+sjpaLGQ54xixCtE9l3NY
+o2QAiZ804KLPaGtbbOv7wPumxQ+8mxG5FN0hTRrsMW9t8pBXw47iMy/T2H21TD5D
+E5XbM6kFeBrnsWnZJ2/ieXqDE4SX0tm3WEvZlDg7N7jV8QDM/D3Xdkb/sqJRabMG
+tQRgwkLiB+mZ5MAfGLogI2/lOEayrBVz4qYdXojewxY4LtaZ5HiUIlyA9CJelMvD
+nS52I6+FpaFhvuZC10qaM9Ph9TNyx+XKRUsPILuDiBRnYiHUKs1qASl5tjn2yyjM
+71WSo7A7btOckzhDZdMVf1T472f0LGsRYoQebMhotqCuR7yArZHzTeWB0CjL3tOz
+j3QlhKt2E1jx43bSK5tBasd9Bpmn2onvdwu1RRP8cyQBsXJSDy4/8t/g63+C3wod
+8VPrlKhK+TenK9EoEqJ2mNuNq+duOjTXfK/7GM5s0BFKv+i2ckpDi1NPckd2gXjF
+yUFZhmK6k0WC4jjWloMt+WQpi1rXMEXwCypgTrqWbvD0p6+X3uQmP57L4yHQcZoW
+Qcs5GnihJ0DIhw9vYDhBhNo0WY1oBO20nVCN3R/JIpp3uDtg64WvfvMSXzJIPBCY
+s+/GM5TtuD6mERDu3+qXxWwiy4PMQRcgjRTMEZ3A4Iv77YfQRkcd6S9qjUUuR/5D
+xs+J4ryb1biz9ofW7I+Dbz4SArWSgwcuh14AV9RBv6Rh9m83rjT2K0yvbe/+7hHW
+R8nzRMqJcGNGCHmRjA/cwoiv6+k2J/RbCJqnR3RmNex/85XaXBfZwRfHXVbzZQfa
+SrFaaNLf1hMwGLAJjIcQRxa3yZbjFXVx1Bp4hh8rKNWaOItjavNtNg==
+-----END RSA PRIVATE KEY-----
@@ -0,0 +1,23 @@
+-----BEGIN CERTIFICATE-----
+MIID0zCCArugAwIBAgIJALi5bDfjMszLMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV
+BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp
+Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTUwOTIzMTg1MjU3WhcNMTkwOTIyMTg1MjU3
+WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV
+BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA3rGZ1QbsW0+MuyrSLmMfDFKtLBkIFW8V0gRuurFg1PUKKNR1
+Mq2tMVwjjYETAU/UY0iKZOzjgvYPKhDTYBTte/WHR1ZK4CYVv7TQX/gtFQG/ge/c
+7u0sLch9p7fbd+/HZiLS/rBEZDIohvgUvzvnA8+OIYnw4kuxKo/5iboAIS41klMg
+/lATm8V71LMY68inht71/ZkQoAHKgcR9z4yNYvQ1WqKG8DG8KROXltll3sTrKbl5
+zJhn660es/1ZnR6nvwt6xnSTl/mNHMjkfv1bs4rJ/py3qPxicdoSIn/KyojUcgHV
+F38fuAy2CQTdjVG5fWj9iz+mQvLm3+qsIYQdFwIDAQABo4G/MIG8MAkGA1UdEwQC
+MAAwHQYDVR0OBBYEFEMMWLWQi/g83PzlHYqAVnty5L7HMIGPBgNVHREEgYcwgYSC
+CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds
+b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s
+b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL
+BQADggEBAMjGGXT8Nt1tbl2GkiKtmiuGE2Ej66YuZ37WSJViaRNDVHLlg87TCcHe
+k2rdO+6sFqQbbzEfwQ05T7xGmVu7tm54HwKMRugoQ3wct0bQC5wEWYN+oMDvSyO6
+M28mZwWb4VtR2IRyWP+ve5DHwTM9mxWa6rBlGzsQqH6YkJpZojzqk/mQTug+Y8aE
+mVoqRIPMHq9ob+S9qd5lp09+MtYpwPfTPx/NN+xMEooXWW/ARfpGhWPkg/FuCu4z
+1tFmCqHgNcWirzMm3dQpF78muE9ng6OB2MXQwL4VgnVkxmlZNHbkR2v/t8MyZJxC
+y4g6cTMM3S/UMt5/+aIB2JAuMKyuD+A=
+-----END CERTIFICATE-----
Some files were not shown because too many files have changed in this diff.