Merge branch 'master' into index-lifecycle

Colin Goodheart-Smithe 2018-07-26 10:49:57 +01:00
commit 99e8b5bc13
No known key found for this signature in database
GPG Key ID: F975E7BDD739B3C7
44 changed files with 547 additions and 279 deletions

@ -1,9 +1,6 @@
[[painless-contexts]]
== Painless contexts
:es_version: https://www.elastic.co/guide/en/elasticsearch/reference/master
:xp_version: https://www.elastic.co/guide/en/x-pack/current
A Painless script is evaluated within a context. Each context has values that
are available as local variables, a whitelist that controls the available
classes and the methods and fields within those classes (the API), and
@ -18,41 +15,41 @@ specialized code may define new ways to use a Painless script.
| Name | Painless Documentation
| Elasticsearch Documentation
| Update | <<painless-update-context, Painless Documentation>>
| {es_version}/docs-update.html[Elasticsearch Documentation]
| {ref}/docs-update.html[Elasticsearch Documentation]
| Update by query | <<painless-update-by-query-context, Painless Documentation>>
| {es_version}/docs-update-by-query.html[Elasticsearch Documentation]
| {ref}/docs-update-by-query.html[Elasticsearch Documentation]
| Reindex | <<painless-reindex-context, Painless Documentation>>
| {es_version}/docs-reindex.html[Elasticsearch Documentation]
| {ref}/docs-reindex.html[Elasticsearch Documentation]
| Sort | <<painless-sort-context, Painless Documentation>>
| {es_version}/search-request-sort.html[Elasticsearch Documentation]
| {ref}/search-request-sort.html[Elasticsearch Documentation]
| Similarity | <<painless-similarity-context, Painless Documentation>>
| {es_version}/index-modules-similarity.html[Elasticsearch Documentation]
| Weight | <<painless-similarity-context, Painless Documentation>>
| {es_version}/index-modules-similarity.html[Elasticsearch Documentation]
| {ref}/index-modules-similarity.html[Elasticsearch Documentation]
| Weight | <<painless-weight-context, Painless Documentation>>
| {ref}/index-modules-similarity.html[Elasticsearch Documentation]
| Score | <<painless-score-context, Painless Documentation>>
| {es_version}/query-dsl-function-score-query.html[Elasticsearch Documentation]
| {ref}/query-dsl-function-score-query.html[Elasticsearch Documentation]
| Field | <<painless-field-context, Painless Documentation>>
| {es_version}/search-request-script-fields.html[Elasticsearch Documentation]
| {ref}/search-request-script-fields.html[Elasticsearch Documentation]
| Filter | <<painless-filter-context, Painless Documentation>>
| {es_version}/query-dsl-script-query.html[Elasticsearch Documentation]
| {ref}/query-dsl-script-query.html[Elasticsearch Documentation]
| Minimum should match | <<painless-min-should-match-context, Painless Documentation>>
| {es_version}/query-dsl-terms-set-query.html[Elasticsearch Documentation]
| {ref}/query-dsl-terms-set-query.html[Elasticsearch Documentation]
| Metric aggregation initialization | <<painless-metric-agg-init-context, Painless Documentation>>
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| Metric aggregation map | <<painless-metric-agg-map-context, Painless Documentation>>
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| Metric aggregation combine | <<painless-metric-agg-combine-context, Painless Documentation>>
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| Metric aggregation reduce | <<painless-metric-agg-reduce-context, Painless Documentation>>
| {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
| Bucket aggregation | <<painless-bucket-agg-context, Painless Documentation>>
| {es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation]
| {ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation]
| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
| {es_version}/script-processor.html[Elasticsearch Documentation]
| {ref}/script-processor.html[Elasticsearch Documentation]
| Watcher condition | <<painless-watcher-condition-context, Painless Documentation>>
| {xp_version}/condition-script.html[Elasticsearch Documentation]
| {xpack-ref}/condition-script.html[Elasticsearch Documentation]
| Watcher transform | <<painless-watcher-transform-context, Painless Documentation>>
| {xp_version}/transform-script.html[Elasticsearch Documentation]
| {xpack-ref}/transform-script.html[Elasticsearch Documentation]
|====
include::painless-contexts/index.asciidoc[]

@ -2,7 +2,7 @@
=== Bucket aggregation context
Use a Painless script in an
{es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[bucket aggregation]
{ref}/search-aggregations-pipeline-bucket-script-aggregation.html[bucket aggregation]
to calculate a value as a result in a bucket.
*Variables*

@ -2,7 +2,7 @@
=== Field context
Use a Painless script to create a
{es_version}/search-request-script-fields.html[script field] to return
{ref}/search-request-script-fields.html[script field] to return
a customized value for each document in the results of a query.
*Variables*
@ -14,7 +14,7 @@ a customized value for each document in the results of a query.
Contains the fields of the specified document where each field is a
`List` of values.
{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
{ref}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
Contains extracted JSON in a `Map` and `List` structure for the fields
existing in a stored document.

@ -1,7 +1,7 @@
[[painless-filter-context]]
=== Filter context
Use a Painless script as a {es_version}/query-dsl-script-query.html[filter] in a
Use a Painless script as a {ref}/query-dsl-script-query.html[filter] in a
query to include and exclude documents.

@ -1,7 +1,7 @@
[[painless-ingest-processor-context]]
=== Ingest processor context
Use a Painless script in an {es_version}/script-processor.html[ingest processor]
Use a Painless script in an {ref}/script-processor.html[ingest processor]
to modify documents upon insertion.
*Variables*
@ -9,10 +9,10 @@ to modify documents upon insertion.
`params` (`Map`, read-only)::
User-defined parameters passed in as part of the query.
{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`)::
{ref}/mapping-index-field.html[`ctx['_index']`] (`String`)::
The name of the index.
{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`)::
{ref}/mapping-type-field.html[`ctx['_type']`] (`String`)::
The type of document within an index.
`ctx` (`Map`)::
@ -21,10 +21,10 @@ to modify documents upon insertion.
*Side Effects*
{es_version}/mapping-index-field.html[`ctx['_index']`]::
{ref}/mapping-index-field.html[`ctx['_index']`]::
Modify this to change the destination index for the current document.
{es_version}/mapping-type-field.html[`ctx['_type']`]::
{ref}/mapping-type-field.html[`ctx['_type']`]::
Modify this to change the type for the current document.
`ctx` (`Map`, read-only)::

@ -2,7 +2,7 @@
=== Metric aggregation combine context
Use a Painless script to
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[combine]
{ref}/search-aggregations-metrics-scripted-metric-aggregation.html[combine]
values for use in a scripted metric aggregation. A combine script is run once
per shard following a <<painless-metric-agg-map-context, map script>> and is
optional as part of a full metric aggregation.

@ -2,7 +2,7 @@
=== Metric aggregation initialization context
Use a Painless script to
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[initialize]
{ref}/search-aggregations-metrics-scripted-metric-aggregation.html[initialize]
values for use in a scripted metric aggregation. An initialization script is
run prior to document collection once per shard and is optional as part of the
full metric aggregation.

@ -2,7 +2,7 @@
=== Metric aggregation map context
Use a Painless script to
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[map]
{ref}/search-aggregations-metrics-scripted-metric-aggregation.html[map]
values for use in a scripted metric aggregation. A map script is run once per
collected document following an optional
<<painless-metric-agg-init-context, initialization script>> and is required as

@ -2,7 +2,7 @@
=== Metric aggregation reduce context
Use a Painless script to
{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[reduce]
{ref}/search-aggregations-metrics-scripted-metric-aggregation.html[reduce]
values to produce the result of a scripted metric aggregation. A reduce script
is run once on the coordinating node following a
<<painless-metric-agg-combine-context, combine script>> (or a

@ -2,7 +2,7 @@
=== Minimum should match context
Use a Painless script to specify the
{es_version}/query-dsl-terms-set-query.html[minimum] number of terms that a
{ref}/query-dsl-terms-set-query.html[minimum] number of terms that a
specified field must match for a document to be included in the query
results.

@ -1,7 +1,7 @@
[[painless-reindex-context]]
=== Reindex context
Use a Painless script in a {es_version}/docs-reindex.html[reindex] operation to
Use a Painless script in a {ref}/docs-reindex.html[reindex] operation to
add, modify, or delete fields within each document in an original index as it's
reindexed into a target index.
@ -13,22 +13,22 @@ reindexed into a target index.
`ctx['_op']` (`String`)::
The name of the operation.
{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`)::
{ref}/mapping-routing-field.html[`ctx['_routing']`] (`String`)::
The value used to select a shard for document storage.
{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`)::
{ref}/mapping-index-field.html[`ctx['_index']`] (`String`)::
The name of the index.
{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`)::
{ref}/mapping-type-field.html[`ctx['_type']`] (`String`)::
The type of document within an index.
{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only)::
{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only)::
The unique document id.
`ctx['_version']` (`int`)::
The current version of the document.
{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
{ref}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
Contains extracted JSON in a `Map` and `List` structure for the fields
existing in a stored document.
@ -39,22 +39,22 @@ reindexed into a target index.
specify no operation or `delete` to delete the current document from
the index.
{es_version}/mapping-routing-field.html[`ctx['_routing']`]::
{ref}/mapping-routing-field.html[`ctx['_routing']`]::
Modify this to change the routing value for the current document.
{es_version}/mapping-index-field.html[`ctx['_index']`]::
{ref}/mapping-index-field.html[`ctx['_index']`]::
Modify this to change the destination index for the current document.
{es_version}/mapping-type-field.html[`ctx['_type']`]::
{ref}/mapping-type-field.html[`ctx['_type']`]::
Modify this to change the type for the current document.
{es_version}/mapping-id-field.html[`ctx['_id']`]::
{ref}/mapping-id-field.html[`ctx['_id']`]::
Modify this to change the id for the current document.
`ctx['_version']` (`int`)::
Modify this to change the version for the current document.
{es_version}/mapping-source-field.html[`ctx['_source']`]::
{ref}/mapping-source-field.html[`ctx['_source']`]::
Modify the values in the `Map/List` structure to add, modify, or delete
the fields of a document.

@ -2,7 +2,7 @@
=== Score context
Use a Painless script in a
{es_version}/query-dsl-function-score-query.html[function score] to apply a new
{ref}/query-dsl-function-score-query.html[function score] to apply a new
score to documents returned from a query.
*Variables*

@ -2,7 +2,7 @@
=== Similarity context
Use a Painless script to create a
{es_version}/index-modules-similarity.html[similarity] equation for scoring
{ref}/index-modules-similarity.html[similarity] equation for scoring
documents in a query.
*Variables*

@ -2,7 +2,7 @@
=== Sort context
Use a Painless script to
{es_version}/search-request-sort.html[sort] the documents in a query.
{ref}/search-request-sort.html[sort] the documents in a query.
*Variables*

@ -2,7 +2,7 @@
=== Update by query context
Use a Painless script in an
{es_version}/docs-update-by-query.html[update by query] operation to add,
{ref}/docs-update-by-query.html[update by query] operation to add,
modify, or delete fields within each of a set of documents collected as the
result of a query.
@ -14,22 +14,22 @@ result of a query.
`ctx['_op']` (`String`)::
The name of the operation.
{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`, read-only)::
{ref}/mapping-routing-field.html[`ctx['_routing']`] (`String`, read-only)::
The value used to select a shard for document storage.
{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only)::
{ref}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only)::
The name of the index.
{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`, read-only)::
{ref}/mapping-type-field.html[`ctx['_type']`] (`String`, read-only)::
The type of document within an index.
{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only)::
{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only)::
The unique document id.
`ctx['_version']` (`int`, read-only)::
The current version of the document.
{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
{ref}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
Contains extracted JSON in a `Map` and `List` structure for the fields
existing in a stored document.
@ -40,7 +40,7 @@ result of a query.
specify no operation or `delete` to delete the current document from
the index.
{es_version}/mapping-source-field.html[`ctx['_source']`]::
{ref}/mapping-source-field.html[`ctx['_source']`]::
Modify the values in the `Map/List` structure to add, modify, or delete
the fields of a document.

@ -1,7 +1,7 @@
[[painless-update-context]]
=== Update context
Use a Painless script in an {es_version}/docs-update.html[update] operation to
Use a Painless script in an {ref}/docs-update.html[update] operation to
add, modify, or delete fields within a single document.
*Variables*
@ -12,16 +12,16 @@ add, modify, or delete fields within a single document.
`ctx['_op']` (`String`)::
The name of the operation.
{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`, read-only)::
{ref}/mapping-routing-field.html[`ctx['_routing']`] (`String`, read-only)::
The value used to select a shard for document storage.
{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only)::
{ref}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only)::
The name of the index.
{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`, read-only)::
{ref}/mapping-type-field.html[`ctx['_type']`] (`String`, read-only)::
The type of document within an index.
{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only)::
{ref}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only)::
The unique document id.
`ctx['_version']` (`int`, read-only)::
@ -30,7 +30,7 @@ add, modify, or delete fields within a single document.
`ctx['_now']` (`long`, read-only)::
The current timestamp in milliseconds.
{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
{ref}/mapping-source-field.html[`ctx['_source']`] (`Map`)::
Contains extracted JSON in a `Map` and `List` structure for the fields
existing in a stored document.
@ -41,7 +41,7 @@ add, modify, or delete fields within a single document.
specify no operation or `delete` to delete the current document from
the index.
{es_version}/mapping-source-field.html[`ctx['_source']`]::
{ref}/mapping-source-field.html[`ctx['_source']`]::
Modify the values in the `Map/List` structure to add, modify, or delete
the fields of a document.

@ -1,7 +1,7 @@
[[painless-watcher-condition-context]]
=== Watcher condition context
Use a Painless script as a {xp_version}/condition-script.html[watcher condition]
Use a Painless script as a {xpack-ref}/condition-script.html[watcher condition]
to test if a response is necessary.
*Variables*
@ -26,7 +26,7 @@ to test if a response is necessary.
`ctx['payload']` (`Map`, read-only)::
The accessible watch data based upon the
{xp_version}/input.html[watch input].
{xpack-ref}/input.html[watch input].
*Return*

@ -1,7 +1,7 @@
[[painless-watcher-transform-context]]
=== Watcher transform context
Use a Painless script to {xp_version}/transform-script.html[transform] watch
Use a Painless script to {xpack-ref}/transform-script.html[transform] watch
data into a new payload for use in a response to a condition.
*Variables*
@ -26,7 +26,7 @@ data into a new payload for use in a response to a condition.
`ctx['payload']` (`Map`, read-only)::
The accessible watch data based upon the
{xp_version}/input.html[watch input].
{xpack-ref}/input.html[watch input].
*Return*

@ -2,7 +2,7 @@
=== Weight context
Use a Painless script to create a
{es_version}/index-modules-similarity.html[weight] for use in a
{ref}/index-modules-similarity.html[weight] for use in a
<<painless-similarity-context, similarity script>>. Weight is used to prevent
recalculation of constants that remain the same across documents.

@ -27,11 +27,17 @@ For more information about creating and updating the {es} keystore, see
`xpack.security.enabled`::
Set to `true` to enable {security} on the node. +
+
--
If set to `false`, which is the default value for basic and trial licenses,
{security} is disabled. This setting also affects all {kib} instances that connect to this
{es} instance; you do not need to disable {security} in those `kibana.yml` files.
For more information about disabling {security} in specific {kib} instances, see {kibana-ref}/security-settings-kb.html[{kib} security settings].
TIP: If you have a gold or higher license, the default value is `true`; we
recommend that you explicitly add this setting to avoid confusion.
--
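A minimal `elasticsearch.yml` sketch of the explicit setting recommended by the tip above (the value shown is illustrative):

[source,yaml]
----
# Explicitly enable {security} rather than relying on the license-dependent default.
xpack.security.enabled: true
----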
`xpack.security.hide_settings`::
A comma-separated list of settings that are omitted from the results of the
<<cluster-nodes-info,cluster nodes info API>>. You can use wildcards to include

@ -67,6 +67,7 @@ public class ConvertProcessorTests extends ESTestCase {
assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(10));
}
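// @AwaitsFix mutes the test below until the linked issue is resolved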
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32370")
public void testConvertIntHexError() {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String value = "0x" + randomAlphaOfLengthBetween(1, 10);

@ -103,10 +103,10 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
ScriptContext<?> context = entry.getKey();
if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) {
contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class,
new PainlessLookupBuilder(entry.getValue()).build()));
PainlessLookupBuilder.buildFromWhitelists(entry.getValue())));
} else {
contextsToCompilers.put(context, new Compiler(context.instanceClazz,
new PainlessLookupBuilder(entry.getValue()).build()));
PainlessLookupBuilder.buildFromWhitelists(entry.getValue())));
}
}

@ -126,14 +126,55 @@ public class PainlessLookupBuilder {
private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$");
private static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$");
private final List<Whitelist> whitelists;
public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) {
PainlessLookupBuilder painlessLookupBuilder = new PainlessLookupBuilder();
String origin = "internal error";
try {
for (Whitelist whitelist : whitelists) {
for (WhitelistClass whitelistClass : whitelist.whitelistStructs) {
origin = whitelistClass.origin;
painlessLookupBuilder.addPainlessClass(
whitelist.javaClassLoader, whitelistClass.javaClassName, whitelistClass.onlyFQNJavaClassName == false);
}
}
for (Whitelist whitelist : whitelists) {
for (WhitelistClass whitelistClass : whitelist.whitelistStructs) {
String targetCanonicalClassName = whitelistClass.javaClassName.replace('$', '.');
for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) {
origin = whitelistConstructor.origin;
painlessLookupBuilder.addPainlessConstructor(
targetCanonicalClassName, whitelistConstructor.painlessParameterTypeNames);
}
for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) {
origin = whitelistMethod.origin;
painlessLookupBuilder.addPainlessMethod(
whitelist.javaClassLoader, targetCanonicalClassName, whitelistMethod.javaAugmentedClassName,
whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName,
whitelistMethod.painlessParameterTypeNames);
}
for (WhitelistField whitelistField : whitelistClass.whitelistFields) {
origin = whitelistField.origin;
painlessLookupBuilder.addPainlessField(
targetCanonicalClassName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName);
}
}
}
} catch (Exception exception) {
throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
}
return painlessLookupBuilder.build();
}
private final Map<String, Class<?>> canonicalClassNamesToClasses;
private final Map<Class<?>, PainlessClassBuilder> classesToPainlessClassBuilders;
public PainlessLookupBuilder(List<Whitelist> whitelists) {
this.whitelists = whitelists;
public PainlessLookupBuilder() {
canonicalClassNamesToClasses = new HashMap<>();
classesToPainlessClassBuilders = new HashMap<>();
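The call-site change repeated throughout this commit follows from this refactoring; a minimal before/after sketch, grounded in the substitutions shown in the files below:

// before this commit: whitelists were handed to the constructor and processed in build()
PainlessLookup lookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();

// after this commit: the static factory iterates the whitelists and then calls build()
PainlessLookup lookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);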
@ -666,60 +707,6 @@ public class PainlessLookupBuilder {
}
public PainlessLookup build() {
String origin = "internal error";
try {
// first iteration collects all the Painless type names that
// are used for validation during the second iteration
for (Whitelist whitelist : whitelists) {
for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
PainlessClassBuilder painlessStruct =
classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName));
if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
"[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
}
origin = whitelistStruct.origin;
addPainlessClass(
whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false);
painlessStruct = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName));
classesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct);
}
}
// second iteration adds all the constructors, methods, and fields that will
// be available in Painless along with validating they exist and all their types have
// been white-listed during the first iteration
for (Whitelist whitelist : whitelists) {
for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
origin = whitelistConstructor.origin;
addPainlessConstructor(painlessTypeName, whitelistConstructor.painlessParameterTypeNames);
}
for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
origin = whitelistMethod.origin;
addPainlessMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod.javaAugmentedClassName,
whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName,
whitelistMethod.painlessParameterTypeNames);
}
for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
origin = whitelistField.origin;
addPainlessField(painlessTypeName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName);
}
}
}
} catch (Exception exception) {
throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
}
copyPainlessClassMembers();
cacheRuntimeHandles();
setFunctionalInterfaceMethods();

@ -20,7 +20,6 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.test.ESTestCase;
public class AnalyzerCasterTests extends ESTestCase {

@ -19,14 +19,14 @@
package org.elasticsearch.painless;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.startsWith;
*/
public class BaseClassTests extends ScriptTestCase {
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
public abstract static class Gets {

@ -2,6 +2,9 @@ package org.elasticsearch.painless;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/*
* Licensed to Elasticsearch under one or more contributor
@ -22,10 +25,6 @@ import java.util.Collections;
* under the License.
*/
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class BasicStatementTests extends ScriptTestCase {
public void testIfStatement() {

@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
public class DebugTests extends ScriptTestCase {
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
public void testExplain() {
// Debug.explain can explain an object

@ -40,7 +40,7 @@ final class Debugger {
PrintWriter outputWriter = new PrintWriter(output);
Textifier textifier = new Textifier();
try {
new Compiler(iface, new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build())
new Compiler(iface, PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS))
.compile("<debugging>", source, settings, textifier);
} catch (RuntimeException e) {
textifier.print(outputWriter);

@ -19,6 +19,11 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.test.ESTestCase;
import java.lang.invoke.CallSite;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
@ -27,13 +32,8 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.test.ESTestCase;
public class DefBootstrapTests extends ESTestCase {
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
/** calls toString() on integers, twice */
public void testOneType() throws Throwable {

@ -21,7 +21,6 @@ package org.elasticsearch.painless;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;

@ -52,7 +52,7 @@ import static java.util.stream.Collectors.toList;
*/
public class PainlessDocGenerator {
private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
private static final PainlessLookup PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class);
private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.name);
private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.name);

@ -92,7 +92,7 @@ public abstract class ScriptTestCase extends ESTestCase {
public Object exec(String script, Map<String, Object> vars, Map<String,String> compileParams, Scorer scorer, boolean picky) {
// test for ambiguity errors before running the actual script if picky is true
if (picky) {
PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class);
CompilerSettings pickySettings = new CompilerSettings();
pickySettings.setPicky(true);

@ -22,8 +22,8 @@ package org.elasticsearch.painless;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import java.util.ArrayList;
import java.util.Collections;

@ -20,7 +20,6 @@
package org.elasticsearch.painless;
import junit.framework.AssertionFailedError;
import org.apache.lucene.util.Constants;
import org.elasticsearch.script.ScriptException;

@ -20,21 +20,21 @@
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.FeatureTest;
import org.elasticsearch.painless.GenericElasticsearchScript;
import org.elasticsearch.painless.Locals.Variable;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.ScriptClassInfo;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.antlr.Walker;
import org.elasticsearch.painless.lookup.PainlessCast;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
@ -49,7 +49,7 @@ import static org.elasticsearch.painless.node.SSource.MainMethodReserved;
* Tests {@link Object#toString} implementations on all extensions of {@link ANode}.
*/
public class NodeToStringTests extends ESTestCase {
private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
public void testEAssignment() {
assertToString(

@ -70,7 +70,6 @@ import java.util.Set;
* }
* </pre>
*/
@SuppressWarnings("unchecked")
public class RatedRequest implements Writeable, ToXContentObject {
private final String id;
private final List<String> summaryFields;
@ -250,6 +249,7 @@ public class RatedRequest implements Writeable, ToXContentObject {
private static final ParseField FIELDS_FIELD = new ParseField("summary_fields");
private static final ParseField TEMPLATE_ID_FIELD = new ParseField("template_id");
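// the unchecked casts in the parser below are confined to this field, so the class-level annotation moves here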
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<RatedRequest, Void> PARSER = new ConstructingObjectParser<>("request",
a -> new RatedRequest((String) a[0], (List<RatedDocument>) a[1], (SearchSourceBuilder) a[2], (Map<String, Object>) a[3],
(String) a[4]));

@ -21,6 +21,7 @@ package org.elasticsearch.http.nio;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelPromise;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
@ -116,9 +117,9 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
ByteBuf buf = requestEncoder.encode(httpRequest);
int slicePoint = randomInt(buf.writerIndex() - 1);
ByteBuf slicedBuf = buf.retainedSlice(0, slicePoint);
ByteBuf slicedBuf2 = buf.retainedSlice(slicePoint, buf.writerIndex());
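// retainedSlice() increments the buffer's reference count, so both slices need their own release() in the finally block below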
try {
handler.consumeReads(toChannelBuffer(slicedBuf));
verify(transport, times(0)).incomingRequest(any(HttpRequest.class), any(NioHttpChannel.class));
@ -131,6 +132,12 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
HttpRequest nioHttpRequest = requestCaptor.getValue();
assertEquals(HttpRequest.HttpVersion.HTTP_1_1, nioHttpRequest.protocolVersion());
assertEquals(RestRequest.Method.GET, nioHttpRequest.method());
} finally {
handler.close();
buf.release();
slicedBuf.release();
slicedBuf2.release();
}
}
public void testDecodeHttpRequestError() throws IOException {
@ -138,6 +145,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
io.netty.handler.codec.http.HttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, uri);
ByteBuf buf = requestEncoder.encode(httpRequest);
try {
buf.setByte(0, ' ');
buf.setByte(1, ' ');
buf.setByte(2, ' ');
@ -148,6 +156,9 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
verify(transport).incomingRequestError(any(HttpRequest.class), any(NioHttpChannel.class), exceptionCaptor.capture());
assertTrue(exceptionCaptor.getValue() instanceof IllegalArgumentException);
} finally {
buf.release();
}
}
public void testDecodeHttpRequestContentLengthToLongGeneratesOutboundMessage() throws IOException {
@ -157,9 +168,11 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
HttpUtil.setKeepAlive(httpRequest, false);
ByteBuf buf = requestEncoder.encode(httpRequest);
try {
handler.consumeReads(toChannelBuffer(buf));
} finally {
buf.release();
}
verify(transport, times(0)).incomingRequestError(any(), any(), any());
verify(transport, times(0)).incomingRequest(any(), any());
@ -168,6 +181,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
FlushOperation flushOperation = flushOperations.get(0);
FullHttpResponse response = responseDecoder.decode(Unpooled.wrappedBuffer(flushOperation.getBuffersToWrite()));
try {
assertEquals(HttpVersion.HTTP_1_1, response.protocolVersion());
assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, response.status());
@ -175,6 +189,9 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
// Since we have keep-alive set to false, we should close the channel after the response has been
// flushed
verify(nioHttpChannel).close();
} finally {
response.release();
}
}
@SuppressWarnings("unchecked")
@ -189,11 +206,15 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
SocketChannelContext context = mock(SocketChannelContext.class);
HttpWriteOperation writeOperation = new HttpWriteOperation(context, httpResponse, mock(BiConsumer.class));
List<FlushOperation> flushOperations = handler.writeToBytes(writeOperation);
FullHttpResponse response = responseDecoder.decode(Unpooled.wrappedBuffer(flushOperations.get(0).getBuffersToWrite()));
FlushOperation operation = flushOperations.get(0);
FullHttpResponse response = responseDecoder.decode(Unpooled.wrappedBuffer(operation.getBuffersToWrite()));
((ChannelPromise) operation.getListener()).setSuccess();
try {
assertEquals(HttpResponseStatus.OK, response.status());
assertEquals(HttpVersion.HTTP_1_1, response.protocolVersion());
} finally {
response.release();
}
}
public void testCorsEnabledWithoutAllowOrigins() throws IOException {
@ -201,9 +222,13 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
Settings settings = Settings.builder()
.put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true)
.build();
io.netty.handler.codec.http.HttpResponse response = executeCorsRequest(settings, "remote-host", "request-host");
FullHttpResponse response = executeCorsRequest(settings, "remote-host", "request-host");
try {
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), nullValue());
} finally {
response.release();
}
}
public void testCorsEnabledWithAllowOrigins() throws IOException {
@ -213,11 +238,15 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
.put(SETTING_CORS_ENABLED.getKey(), true)
.put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
.build();
io.netty.handler.codec.http.HttpResponse response = executeCorsRequest(settings, originValue, "request-host");
FullHttpResponse response = executeCorsRequest(settings, originValue, "request-host");
try {
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
} finally {
response.release();
}
}
public void testCorsAllowOriginWithSameHost() throws IOException {
@ -228,29 +257,44 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
.put(SETTING_CORS_ENABLED.getKey(), true)
.build();
FullHttpResponse response = executeCorsRequest(settings, originValue, host);
String allowedOrigins;
try {
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
originValue = "http://" + originValue;
response = executeCorsRequest(settings, originValue, host);
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
} finally {
response.release();
}
originValue = "http://" + originValue;
response = executeCorsRequest(settings, originValue, host);
try {
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
} finally {
response.release();
}
originValue = originValue + ":5555";
host = host + ":5555";
response = executeCorsRequest(settings, originValue, host);
try {
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
} finally {
response.release();
}
originValue = originValue.replace("http", "https");
response = executeCorsRequest(settings, originValue, host);
try {
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
} finally {
response.release();
}
}
public void testThatStringLiteralWorksOnMatch() throws IOException {
@ -261,12 +305,16 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
.put(SETTING_CORS_ALLOW_METHODS.getKey(), "get, options, post")
.put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true)
.build();
io.netty.handler.codec.http.HttpResponse response = executeCorsRequest(settings, originValue, "request-host");
FullHttpResponse response = executeCorsRequest(settings, originValue, "request-host");
try {
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), equalTo("true"));
} finally {
response.release();
}
}
public void testThatAnyOriginWorks() throws IOException {
@ -275,12 +323,16 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
.put(SETTING_CORS_ENABLED.getKey(), true)
.put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
.build();
io.netty.handler.codec.http.HttpResponse response = executeCorsRequest(settings, originValue, "request-host");
FullHttpResponse response = executeCorsRequest(settings, originValue, "request-host");
try {
// inspect response and validate
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN), notNullValue());
String allowedOrigins = response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN);
assertThat(allowedOrigins, is(originValue));
assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), nullValue());
} finally {
response.release();
}
}
private FullHttpResponse executeCorsRequest(final Settings settings, final String originValue, final String host) throws IOException {
@ -300,8 +352,9 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
SocketChannelContext context = mock(SocketChannelContext.class);
List<FlushOperation> flushOperations = handler.writeToBytes(handler.createWriteOperation(context, response, (v, e) -> {}));
handler.close();
FlushOperation flushOperation = flushOperations.get(0);
((ChannelPromise) flushOperation.getListener()).setSuccess();
return responseDecoder.decode(Unpooled.wrappedBuffer(flushOperation.getBuffersToWrite()));
}
@ -314,8 +367,11 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
io.netty.handler.codec.http.HttpRequest request = new DefaultFullHttpRequest(version, method, uri);
ByteBuf buf = requestEncoder.encode(request);
try {
handler.consumeReads(toChannelBuffer(buf));
} finally {
buf.release();
}
ArgumentCaptor<NioHttpRequest> requestCaptor = ArgumentCaptor.forClass(NioHttpRequest.class);
verify(transport, atLeastOnce()).incomingRequest(requestCaptor.capture(), any(HttpChannel.class));

@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.apache.lucene.index.FilterMergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.TieredMergePolicy;
import java.io.IOException;
import java.util.Map;
/**
* Wrapper around {@link TieredMergePolicy} which doesn't respect
* {@link TieredMergePolicy#setMaxMergedSegmentMB(double)} on forced merges.
* See https://issues.apache.org/jira/browse/LUCENE-7976.
*/
final class EsTieredMergePolicy extends FilterMergePolicy {
final TieredMergePolicy regularMergePolicy;
final TieredMergePolicy forcedMergePolicy;
EsTieredMergePolicy() {
super(new TieredMergePolicy());
regularMergePolicy = (TieredMergePolicy) in;
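// "in" is the delegate passed to FilterMergePolicy above; regular (natural) merges route through it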
forcedMergePolicy = new TieredMergePolicy();
forcedMergePolicy.setMaxMergedSegmentMB(Double.POSITIVE_INFINITY); // unlimited
}
@Override
public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount,
Map<SegmentCommitInfo, Boolean> segmentsToMerge, MergeContext mergeContext) throws IOException {
return forcedMergePolicy.findForcedMerges(infos, maxSegmentCount, segmentsToMerge, mergeContext);
}
@Override
public MergeSpecification findForcedDeletesMerges(SegmentInfos infos, MergeContext mergeContext) throws IOException {
return forcedMergePolicy.findForcedDeletesMerges(infos, mergeContext);
}
public void setForceMergeDeletesPctAllowed(double forceMergeDeletesPctAllowed) {
regularMergePolicy.setForceMergeDeletesPctAllowed(forceMergeDeletesPctAllowed);
forcedMergePolicy.setForceMergeDeletesPctAllowed(forceMergeDeletesPctAllowed);
}
public double getForceMergeDeletesPctAllowed() {
return forcedMergePolicy.getForceMergeDeletesPctAllowed();
}
public void setFloorSegmentMB(double mbFrac) {
regularMergePolicy.setFloorSegmentMB(mbFrac);
forcedMergePolicy.setFloorSegmentMB(mbFrac);
}
public double getFloorSegmentMB() {
return regularMergePolicy.getFloorSegmentMB();
}
public void setMaxMergeAtOnce(int maxMergeAtOnce) {
regularMergePolicy.setMaxMergeAtOnce(maxMergeAtOnce);
forcedMergePolicy.setMaxMergeAtOnce(maxMergeAtOnce);
}
public int getMaxMergeAtOnce() {
return regularMergePolicy.getMaxMergeAtOnce();
}
public void setMaxMergeAtOnceExplicit(int maxMergeAtOnceExplicit) {
regularMergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit);
forcedMergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit);
}
public int getMaxMergeAtOnceExplicit() {
return forcedMergePolicy.getMaxMergeAtOnceExplicit();
}
// only setter that must NOT delegate to the forced merge policy
public void setMaxMergedSegmentMB(double mbFrac) {
regularMergePolicy.setMaxMergedSegmentMB(mbFrac);
}
public double getMaxMergedSegmentMB() {
return regularMergePolicy.getMaxMergedSegmentMB();
}
public void setSegmentsPerTier(double segmentsPerTier) {
regularMergePolicy.setSegmentsPerTier(segmentsPerTier);
forcedMergePolicy.setSegmentsPerTier(segmentsPerTier);
}
public double getSegmentsPerTier() {
return regularMergePolicy.getSegmentsPerTier();
}
public void setReclaimDeletesWeight(double reclaimDeletesWeight) {
regularMergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight);
forcedMergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight);
}
public double getReclaimDeletesWeight() {
return regularMergePolicy.getReclaimDeletesWeight();
}
}
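A minimal usage sketch of the wrapper's contract, assuming package-private access to the two delegate fields (as the tests below have):

EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setMaxMergedSegmentMB(5 * 1024); // cap natural merges at ~5 GB segments
// forced merges deliberately keep an unlimited cap, so a force-merge down to one segment still succeeds
assert policy.forcedMergePolicy.getMaxMergedSegmentMB() > 5 * 1024;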

@ -115,7 +115,7 @@ import org.elasticsearch.common.unit.ByteSizeValue;
*/
public final class MergePolicyConfig {
private final TieredMergePolicy mergePolicy = new TieredMergePolicy();
private final EsTieredMergePolicy mergePolicy = new EsTieredMergePolicy();
private final Logger logger;
private final boolean mergesEnabled;

@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.apache.lucene.index.TieredMergePolicy;
import org.elasticsearch.test.ESTestCase;
public class EsTieredMergePolicyTests extends ESTestCase {
public void testDefaults() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
assertEquals(
new TieredMergePolicy().getMaxMergedSegmentMB(),
policy.regularMergePolicy.getMaxMergedSegmentMB(), 0d);
// TODO: fix when incorporating https://issues.apache.org/jira/browse/LUCENE-8398, the first divisor must be a double
assertEquals(Long.MAX_VALUE / 1024 / 1024.0, policy.forcedMergePolicy.getMaxMergedSegmentMB(), 0d);
}
public void testSetMaxMergedSegmentMB() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setMaxMergedSegmentMB(10 * 1024);
assertEquals(10 * 1024, policy.regularMergePolicy.getMaxMergedSegmentMB(), 0d);
// TODO: fix when incorporating https://issues.apache.org/jira/browse/LUCENE-8398, the first divisor must be a double
assertEquals(Long.MAX_VALUE / 1024 / 1024.0, policy.forcedMergePolicy.getMaxMergedSegmentMB(), 0d);
}
public void testSetForceMergeDeletesPctAllowed() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setForceMergeDeletesPctAllowed(42);
assertEquals(42, policy.forcedMergePolicy.getForceMergeDeletesPctAllowed(), 0);
}
public void testSetFloorSegmentMB() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setFloorSegmentMB(42);
assertEquals(42, policy.regularMergePolicy.getFloorSegmentMB(), 0);
assertEquals(42, policy.forcedMergePolicy.getFloorSegmentMB(), 0);
}
public void testSetMaxMergeAtOnce() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setMaxMergeAtOnce(42);
assertEquals(42, policy.regularMergePolicy.getMaxMergeAtOnce());
}
public void testSetMaxMergeAtOnceExplicit() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setMaxMergeAtOnceExplicit(42);
assertEquals(42, policy.forcedMergePolicy.getMaxMergeAtOnceExplicit());
}
public void testSetSegmentsPerTier() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setSegmentsPerTier(42);
assertEquals(42, policy.regularMergePolicy.getSegmentsPerTier(), 0);
}
public void testSetReclaimDeletesWeight() {
EsTieredMergePolicy policy = new EsTieredMergePolicy();
policy.setReclaimDeletesWeight(42);
assertEquals(42, policy.regularMergePolicy.getReclaimDeletesWeight(), 0);
}
}

@ -19,7 +19,6 @@
package org.elasticsearch.index;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.TieredMergePolicy;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -76,43 +75,43 @@ public class MergePolicySettingsTests extends ESTestCase {
public void testTieredMergePolicySettingsUpdate() throws IOException {
IndexSettings indexSettings = indexSettings(Settings.EMPTY);
assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d);
assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING.getKey(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d).build()));
assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d);
assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d);
assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMbFrac(), 0);
assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMbFrac(), 0);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING.getKey(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMb() + 1, ByteSizeUnit.MB)).build()));
assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMb() + 1, ByteSizeUnit.MB).getMbFrac(), 0.001);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMb() + 1, ByteSizeUnit.MB).getMbFrac(), 0.001);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1).build()));
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING.getKey(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1).build()));
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getMbFrac(), 0.0001);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getMbFrac(), 0.0001);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING.getKey(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1)).build()));
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1).getMbFrac(), 0.0001);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1).getMbFrac(), 0.0001);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING.getKey(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1).build()));
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0);
indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build()));
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0);
indexSettings.updateIndexMetaData(newIndexMeta("index", EMPTY_SETTINGS)); // see if defaults are restored
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMb(), ByteSizeUnit.MB).getMbFrac(), 0.00);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1).getMbFrac(), 0.0001);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0);
- assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.getMb(), ByteSizeUnit.MB).getMbFrac(), 0.00);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1).getMbFrac(), 0.0001);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0);
+ assertEquals(((EsTieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0);
}
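An aside on the arithmetic above: the post-restore `getMaxMergedSegmentMB` assertion still compares against `DEFAULT_MAX_MERGED_SEGMENT.getBytes() + 1`, yet passes either way, because `getMbFrac()` reports fractional megabytes and a single byte is far below the `0.0001` delta. A minimal, self-contained sketch of that arithmetic (the 5 GB figure is illustrative, not taken from `MergePolicyConfig`):

import org.elasticsearch.common.unit.ByteSizeValue;

public final class MbFracDeltaSketch {
    public static void main(final String[] args) {
        final long bytes = 5L * 1024 * 1024 * 1024; // illustrative segment size: 5 GB
        // getMbFrac() converts bytes to fractional megabytes, so one extra byte
        // moves the result by 1 / (1024 * 1024) MB, roughly 9.5e-7 MB.
        final double diff = new ByteSizeValue(bytes + 1).getMbFrac() - new ByteSizeValue(bytes).getMbFrac();
        assert diff > 0 && diff < 0.0001 : "a one-byte offset is invisible at the 0.0001 MB delta";
        System.out.println("one byte expressed in MB: " + diff);
    }
}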
public Settings build(String value) {

View File

@@ -169,9 +169,9 @@ aggregation and the Slack action:
more information, see
<<slack-dynamic-attachment, Using Attachments to Format Slack Messages>>.
- | `proxy.host` | no | - | The proxy host to use (only in combination with `proxy.port`)
+ | `proxy.host` | no | The proxy host to use (only in combination with `proxy.port`)
- | `proxy.port` | no | - | The proxy port to use (only in combination with `proxy.host`)
+ | `proxy.port` | no | The proxy port to use (only in combination with `proxy.host`)
|======
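For reference, the two proxy attributes documented above are set on the Slack action itself. A hedged sketch of how they might look in a watch definition (the account name, channel, and nested `proxy` object shape are illustrative assumptions, not taken from this table):

"actions" : {
  "notify_slack" : {
    "slack" : {
      "account" : "monitoring",
      "proxy" : {
        "host" : "proxy.example.com",
        "port" : 8080
      },
      "message" : {
        "to" : [ "#ops" ],
        "text" : "Example notification routed through the proxy"
      }
    }
  }
}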
[[configuring-slack]]

View File

@@ -16,7 +16,7 @@ public class FIPS140PasswordHashingAlgorithmBootstrapCheck implements BootstrapC
private final boolean fipsModeEnabled;
- FIPS140PasswordHashingAlgorithmBootstrapCheck(Settings settings) {
+ FIPS140PasswordHashingAlgorithmBootstrapCheck(final Settings settings) {
this.fipsModeEnabled = Security.FIPS_MODE_ENABLED.get(settings);
}
@@ -27,17 +27,15 @@ public class FIPS140PasswordHashingAlgorithmBootstrapCheck implements BootstrapC
* @return the result of the bootstrap check
*/
@Override
- public BootstrapCheckResult check(BootstrapContext context) {
+ public BootstrapCheckResult check(final BootstrapContext context) {
if (fipsModeEnabled) {
final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(context.settings);
if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) {
return BootstrapCheckResult.failure("Only PBKDF2 is allowed for password hashing in a FIPS-140 JVM. Please set the " +
"appropriate value for [ " + XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey() + " ] setting.");
}
}
return BootstrapCheckResult.success();
}
@Override
public boolean alwaysEnforce() {
return fipsModeEnabled;
}
}
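The heart of the check above is a case-insensitive prefix test on the configured algorithm. A self-contained sketch of just that predicate (the class and method names here are invented for illustration):

import java.util.Locale;

public final class Pbkdf2PrefixSketch {
    // Mirrors the check above: any algorithm whose name starts with "pbkdf2",
    // ignoring case, is acceptable in a FIPS-140 JVM.
    static boolean allowedInFipsJvm(final String algorithm) {
        return algorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2");
    }

    public static void main(final String[] args) {
        assert allowedInFipsJvm("PBKDF2");
        assert allowedInFipsJvm("PBKDF2_10000"); // iteration-count variants pass too
        assert allowedInFipsJvm("BCRYPT") == false;
        assert allowedInFipsJvm("BCRYPT11") == false;
    }
}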

View File

@@ -3,32 +3,60 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.XPackSettings;
+ import java.util.Arrays;
+ import static org.hamcrest.Matchers.equalTo;
public class FIPS140PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCase {
public void testPBKDF2AlgorithmIsAllowed() {
- Settings settings = Settings.builder().put("xpack.security.fips_mode.enabled", "true").build();
- settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000").build();
- assertFalse(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
- settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2").build();
- assertFalse(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
+ {
+ final Settings settings = Settings.builder()
+ .put(Security.FIPS_MODE_ENABLED.getKey(), true)
+ .put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000")
+ .build();
+ final BootstrapCheck.BootstrapCheckResult result =
+ new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null));
+ assertFalse(result.isFailure());
+ }
- public void testBCRYPTAlgorithmIsNotAllowed() {
- Settings settings = Settings.builder().put("xpack.security.fips_mode.enabled", "true").build();
- assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
- settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT").build();
- assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
+ {
+ final Settings settings = Settings.builder()
+ .put(Security.FIPS_MODE_ENABLED.getKey(), true)
+ .put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2")
+ .build();
+ final BootstrapCheck.BootstrapCheckResult result =
+ new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null));
+ assertFalse(result.isFailure());
+ }
}
- settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT11").build();
- assertTrue(new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null)).isFailure());
+ public void testBCRYPTAlgorithmDependsOnFipsMode() {
+ for (final Boolean fipsModeEnabled : Arrays.asList(true, false)) {
+ for (final String passwordHashingAlgorithm : Arrays.asList(null, "BCRYPT", "BCRYPT11")) {
+ runBCRYPTTest(fipsModeEnabled, passwordHashingAlgorithm);
+ }
+ }
+ }
+ private void runBCRYPTTest(final boolean fipsModeEnabled, final String passwordHashingAlgorithm) {
+ final Settings.Builder builder = Settings.builder().put(Security.FIPS_MODE_ENABLED.getKey(), fipsModeEnabled);
+ if (passwordHashingAlgorithm != null) {
+ builder.put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), passwordHashingAlgorithm);
+ }
+ final Settings settings = builder.build();
+ final BootstrapCheck.BootstrapCheckResult result =
+ new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings).check(new BootstrapContext(settings, null));
+ assertThat(result.isFailure(), equalTo(fipsModeEnabled));
+ }
}
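Two points about this rewrite stand out. The old PBKDF2 test rebuilt `settings` on each reassignment, dropping the `fips_mode` flag it had just set, so those assertions never actually exercised FIPS mode; the block-scoped `final` settings in the new version close that trap. The BCRYPT coverage is now an exhaustive fips-mode by algorithm matrix. A standalone skeleton of that matrix pattern (printout stands in for the real assertions):

import java.util.Arrays;

public final class MatrixTestSketch {
    public static void main(final String[] args) {
        // Every combination runs once; failure is expected exactly when FIPS
        // mode is on, regardless of which non-PBKDF2 algorithm is configured.
        for (final boolean fipsModeEnabled : new boolean[] { true, false }) {
            for (final String algorithm : Arrays.asList(null, "BCRYPT", "BCRYPT11")) {
                final boolean expectFailure = fipsModeEnabled;
                System.out.printf("fips=%b algorithm=%s -> expect failure=%b%n",
                        fipsModeEnabled, algorithm, expectFailure);
            }
        }
    }
}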