From c13df3b6c5020c185c232c590132151896d48d03 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 18 May 2016 09:18:10 +0200 Subject: [PATCH 01/36] Clear all caches after testing parent breaker With this commit we clear all caches after testing the parent circuit breaker. This is necessary as caches hold on to circuit breakers internally. Additionally, due to usage of CircuitBreaker#addWithoutBreaking() in caches, it's even possible to go above the limit. As a consequence, all subsequent requests fall victim to the limit. Hence, right after the parent circuit breaker tripped, we clear all caches to reduce these circuit breakers to 0 again. We also exclude the clear caches transport request from limit check in order to ensure it will succeed. As this is typically a very small and low-volume request, it is deemed ok to exclude it. Closes #18325 --- .../TransportClearIndicesCacheAction.java | 2 +- .../node/TransportBroadcastByNodeAction.java | 20 +++++++++++++++++-- .../breaker/CircuitBreakerServiceIT.java | 7 +++---- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 59cd95044cc..fe97302060d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -54,7 +54,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT); + ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT, false); this.indicesService = indicesService; } diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 29863419a4e..3356d189143 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -84,6 +84,20 @@ public abstract class TransportBroadcastByNodeAction request, + String executor) { + this(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, + executor, true); + } + public TransportBroadcastByNodeAction( Settings settings, String actionName, @@ -93,7 +107,8 @@ public abstract class TransportBroadcastByNodeAction request, - String executor) { + String executor, + boolean canTripCircuitBreaker) { super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); this.clusterService = clusterService; @@ -101,7 +116,8 @@ public abstract class TransportBroadcastByNodeAction resetting breaker settings"); + // clear all caches, we could be very close (or even above) the limit and then we will not be able to reset the breaker settings + 
client().admin().indices().prepareClearCache().setFieldDataCache(true).setQueryCache(true).setRequestCache(true).get(); + Settings resetSettings = Settings.builder() .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null)) @@ -214,7 +217,6 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { * Test that a breaker correctly redistributes to a different breaker, in * this case, the fielddata breaker borrows space from the request breaker */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18325") public void testParentChecking() throws Exception { if (noopBreakerUsed()) { logger.info("--> noop breakers used, skipping test"); @@ -274,9 +276,6 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { cause.toString(), startsWith("CircuitBreakingException[[parent] Data too large")); assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException", cause.toString(), endsWith(errMsg)); - } finally { - // reset before teardown as it requires properly set up breakers - reset(); } } From 947daf68d1efe1406f2a5e7b23834032cd1991fc Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 11:54:41 +0200 Subject: [PATCH 02/36] Add CONSOLE to from/size docs Relates to #18160 --- docs/reference/search/request/from-size.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/search/request/from-size.asciidoc b/docs/reference/search/request/from-size.asciidoc index 2e170dc2604..1c44a7ca8d2 100644 --- a/docs/reference/search/request/from-size.asciidoc +++ b/docs/reference/search/request/from-size.asciidoc @@ -12,6 +12,7 @@ defaults to `10`. [source,js] -------------------------------------------------- +GET /_search { "from" : 0, "size" : 10, "query" : { @@ -19,6 +20,8 @@ defaults to `10`. } } -------------------------------------------------- +// CONSOLE + Note that `from` + `size` can not be more than the `index.max_result_window` index setting which defaults to 10,000. See the <> or <> From f22f3c7df5e0c692e07a7841d0b43aee24a151ee Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 13:15:19 +0200 Subject: [PATCH 03/36] Add CONSOLE to several trivial search request docs. Relates to #18160 Touches explain, fielddata-fields, fields, index-boost, min-score, named-queries-and-filters, query --- docs/reference/search/request/explain.asciidoc | 2 ++ .../search/request/fielddata-fields.asciidoc | 4 +++- docs/reference/search/request/fields.asciidoc | 4 ++++ .../search/request/index-boost.asciidoc | 2 ++ .../search/request/min-score.asciidoc | 2 ++ .../request/named-queries-and-filters.asciidoc | 18 +++++++++++------- docs/reference/search/request/query.asciidoc | 2 ++ 7 files changed, 26 insertions(+), 8 deletions(-) diff --git a/docs/reference/search/request/explain.asciidoc b/docs/reference/search/request/explain.asciidoc index 81dc110c263..9bcaecb4840 100644 --- a/docs/reference/search/request/explain.asciidoc +++ b/docs/reference/search/request/explain.asciidoc @@ -5,6 +5,7 @@ Enables explanation for each hit on how its score was computed. [source,js] -------------------------------------------------- +GET /_search { "explain": true, "query" : { @@ -12,3 +13,4 @@ Enables explanation for each hit on how its score was computed. 
} } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/search/request/fielddata-fields.asciidoc b/docs/reference/search/request/fielddata-fields.asciidoc index aaaa606980e..f3a3508b144 100644 --- a/docs/reference/search/request/fielddata-fields.asciidoc +++ b/docs/reference/search/request/fielddata-fields.asciidoc @@ -6,13 +6,15 @@ example: [source,js] -------------------------------------------------- +GET /_search { "query" : { - ... + "match_all": {} }, "fielddata_fields" : ["test1", "test2"] } -------------------------------------------------- +// CONSOLE Field data fields can work on fields that are not stored. diff --git a/docs/reference/search/request/fields.asciidoc b/docs/reference/search/request/fields.asciidoc index e929928d427..3483d470ee2 100644 --- a/docs/reference/search/request/fields.asciidoc +++ b/docs/reference/search/request/fields.asciidoc @@ -11,6 +11,7 @@ by a search hit. [source,js] -------------------------------------------------- +GET /_search { "fields" : ["user", "postDate"], "query" : { @@ -18,6 +19,7 @@ by a search hit. } } -------------------------------------------------- +// CONSOLE `*` can be used to load all stored fields from the document. @@ -26,6 +28,7 @@ returned, for example: [source,js] -------------------------------------------------- +GET /_search { "fields" : [], "query" : { @@ -33,6 +36,7 @@ returned, for example: } } -------------------------------------------------- +// CONSOLE For backwards compatibility, if the fields parameter specifies fields which are not stored (`store` mapping set to diff --git a/docs/reference/search/request/index-boost.asciidoc b/docs/reference/search/request/index-boost.asciidoc index 29d1da3885c..bf766ce8a8c 100644 --- a/docs/reference/search/request/index-boost.asciidoc +++ b/docs/reference/search/request/index-boost.asciidoc @@ -8,6 +8,7 @@ graph where each user has an index). [source,js] -------------------------------------------------- +GET /_search { "indices_boost" : { "index1" : 1.4, @@ -15,3 +16,4 @@ graph where each user has an index). } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/search/request/min-score.asciidoc b/docs/reference/search/request/min-score.asciidoc index f5a212ebf8e..d9dbef99ddf 100644 --- a/docs/reference/search/request/min-score.asciidoc +++ b/docs/reference/search/request/min-score.asciidoc @@ -6,6 +6,7 @@ in `min_score`: [source,js] -------------------------------------------------- +GET /_search { "min_score": 0.5, "query" : { @@ -13,6 +14,7 @@ in `min_score`: } } -------------------------------------------------- +// CONSOLE Note, most times, this does not make much sense, but is provided for advanced use cases. diff --git a/docs/reference/search/request/named-queries-and-filters.asciidoc b/docs/reference/search/request/named-queries-and-filters.asciidoc index 96d7c1357a9..f8be0f1be69 100644 --- a/docs/reference/search/request/named-queries-and-filters.asciidoc +++ b/docs/reference/search/request/named-queries-and-filters.asciidoc @@ -5,21 +5,25 @@ Each filter and query can accept a `_name` in its top level definition. 
[source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "should" : [ + "query": { + "bool" : { + "should" : [ {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }}, {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }} - ], - "filter" : { - "terms" : { - "name.last" : ["banon", "kimchy"], - "_name" : "test" + ], + "filter" : { + "terms" : { + "name.last" : ["banon", "kimchy"], + "_name" : "test" + } } } } } -------------------------------------------------- +// CONSOLE The search response will include for each hit the `matched_queries` it matched on. The tagging of queries and filters only make sense for the `bool` query. diff --git a/docs/reference/search/request/query.asciidoc b/docs/reference/search/request/query.asciidoc index e496320bd97..fa06d0d9bb4 100644 --- a/docs/reference/search/request/query.asciidoc +++ b/docs/reference/search/request/query.asciidoc @@ -6,9 +6,11 @@ query using the <>. [source,js] -------------------------------------------------- +GET /_search { "query" : { "term" : { "user" : "kimchy" } } } -------------------------------------------------- +// CONSOLE From 125b715e4508555b31bef7c8a56388ddf8804b0e Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 13:36:19 +0200 Subject: [PATCH 04/36] Adds CONSOLE to count api --- docs/reference/search/count.asciidoc | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/docs/reference/search/count.asciidoc b/docs/reference/search/count.asciidoc index 9be219f5e74..859455e89b7 100644 --- a/docs/reference/search/count.asciidoc +++ b/docs/reference/search/count.asciidoc @@ -10,15 +10,21 @@ body. Here is an example: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/twitter/tweet/_count?q=user:kimchy' +PUT /twitter/tweet/1?refresh +{ + "user": "kimchy" +} -$ curl -XGET 'http://localhost:9200/twitter/tweet/_count' -d ' +GET /twitter/tweet/_count?q=user:kimchy + +GET /twitter/tweet/_count { "query" : { "term" : { "user" : "kimchy" } } -}' +} -------------------------------------------------- +//CONSOLE NOTE: The query being sent in the body must be nested in a `query` key, same as the <> works @@ -37,6 +43,7 @@ tweets from the twitter index for a certain user. The result is: } } -------------------------------------------------- +// TESTRESPONSE The query is optional, and when not provided, it will use `match_all` to count all the docs. From a3425b4bf8eacb7cf7c340ce82c54d0596a9131b Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 14:31:04 +0200 Subject: [PATCH 05/36] Add CONSOLE to post-filter --- .../search/request/post-filter.asciidoc | 45 +++++++++++++++---- 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/docs/reference/search/request/post-filter.asciidoc b/docs/reference/search/request/post-filter.asciidoc index 7bd95400312..493b4261c82 100644 --- a/docs/reference/search/request/post-filter.asciidoc +++ b/docs/reference/search/request/post-filter.asciidoc @@ -5,14 +5,43 @@ The `post_filter` is applied to the search `hits` at the very end of a search request, after aggregations have already been calculated. 
Its purpose is best explained by example: -Imagine that you are selling shirts, and the user has specified two filters: +Imagine that you are selling shirts that have the following properties: + +[source,js] +------------------------------------------------- +PUT /shirts +{ + "mappings": { + "item": { + "properties": { + "brand": { "type": "keyword"}, + "color": { "type": "keyword"}, + "model": { "type": "keyword"} + } + } + } +} + +PUT /shirts/item/1?refresh +{ + "brand": "gucci", + "color": "red", + "model": "slim" +} +------------------------------------------------ +// CONSOLE +// TESTSETUP + + +Imagine a user has specified two filters: + `color:red` and `brand:gucci`. You only want to show them red shirts made by Gucci in the search results. Normally you would do this with a <>: [source,js] -------------------------------------------------- -curl -XGET localhost:9200/shirts/_search -d ' +GET /shirts/_search { "query": { "bool": { @@ -23,8 +52,8 @@ curl -XGET localhost:9200/shirts/_search -d ' } } } -' -------------------------------------------------- +// CONSOLE However, you would also like to use _faceted navigation_ to display a list of other options that the user could click on. Perhaps you have a `model` field @@ -36,7 +65,7 @@ This can be done with a [source,js] -------------------------------------------------- -curl -XGET localhost:9200/shirts/_search -d ' +GET /shirts/_search { "query": { "bool": { @@ -52,8 +81,8 @@ curl -XGET localhost:9200/shirts/_search -d ' } } } -' -------------------------------------------------- +// CONSOLE <1> Returns the most popular models of red shirts by Gucci. But perhaps you would also like to tell the user how many Gucci shirts are @@ -67,12 +96,12 @@ the `post_filter`: [source,js] -------------------------------------------------- -curl -XGET localhost:9200/shirts/_search -d ' +GET /shirts/_search { "query": { "bool": { "filter": { - { "term": { "brand": "gucci" }} <1> + "term": { "brand": "gucci" } <1> } } }, @@ -95,8 +124,8 @@ curl -XGET localhost:9200/shirts/_search -d ' "term": { "color": "red" } } } -' -------------------------------------------------- +// CONSOLE <1> The main query now finds all shirts by Gucci, regardless of color. <2> The `colors` agg returns popular colors for shirts by Gucci. 
<3> The `color_red` agg limits the `models` sub-aggregation From 0032d4760eb8564c79630054a3a8cd16effda88c Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 14:34:22 +0200 Subject: [PATCH 06/36] Add CONSOLE to preference docs --- docs/reference/search/request/preference.asciidoc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index 0d07f29475e..3d6c6b40cb9 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -56,7 +56,7 @@ for the user: [source,js] ------------------------------------------------ -curl localhost:9200/_search?preference=xyzabc123 -d ' +GET /_search?preference=xyzabc123 { "query": { "match": { @@ -64,7 +64,6 @@ curl localhost:9200/_search?preference=xyzabc123 -d ' } } } -' ------------------------------------------------ - +// CONSOLE From a849cc97ea453d31051a0fe1d98886ecd463893c Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 14:38:54 +0200 Subject: [PATCH 07/36] Add CONSOLE to script-fields docs --- docs/reference/search/request/script-fields.asciidoc | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index 596aba31d82..6e054f02e1c 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -6,9 +6,10 @@ evaluation>> (based on different fields) for each hit, for example: [source,js] -------------------------------------------------- +GET /_search { "query" : { - ... + "match_all": {} }, "script_fields" : { "test1" : { @@ -25,6 +26,8 @@ evaluation>> (based on different fields) for each hit, for example: } } -------------------------------------------------- +// CONSOLE + Script fields can work on fields that are not stored (`my_field_name` in the above case), and allow to return custom values to be returned (the @@ -36,9 +39,10 @@ type). Here is an example: [source,js] -------------------------------------------------- +GET /_search { "query" : { - ... + "match_all": {} }, "script_fields" : { "test1" : { @@ -47,6 +51,7 @@ type). Here is an example: } } -------------------------------------------------- +// CONSOLE Note the `_source` keyword here to navigate the json-like model. From 808ef6cec798fb186c8638dcddec3dc25f4e9067 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 18 May 2016 14:25:20 +0200 Subject: [PATCH 08/36] Fix parsing single `rescore` element in SearchSourceBuilder We are currently only parsing the array-syntax for the rescore part in SearchSourceBuilder ("rescore" : [ {...}, {...} ]) . 
We also need to support "rescore" : {...} Closes #18439 --- .../search/builder/SearchSourceBuilder.java | 3 ++ .../builder/SearchSourceBuilderTests.java | 52 +++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 61f4acb81aa..429aa36e56f 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -1033,6 +1033,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ suggestBuilder = SuggestBuilder.fromXContent(context, suggesters); } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) { sorts = new ArrayList<>(SortBuilder.fromXContent(context)); + } else if (context.getParseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) { + rescoreBuilders = new ArrayList<>(); + rescoreBuilders.add(RescoreBuilder.parseFromXContent(context)); } else if (context.getParseFieldMatcher().match(currentFieldName, EXT_FIELD)) { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser); ext = xContentBuilder.bytes(); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 9f0113829ce..077d978a4ce 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -72,6 +72,7 @@ import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilderTests; import org.elasticsearch.search.rescore.QueryRescoreBuilderTests; +import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; @@ -561,6 +562,57 @@ public class SearchSourceBuilderTests extends ESTestCase { } } + /** + * test that we can parse the `rescore` element either as single object or as array + */ + public void testParseRescore() throws IOException { + { + String restContent = "{\n" + + " \"query\" : {\n" + + " \"match\": { \"content\": { \"query\": \"foo bar\" }}\n" + + " },\n" + + " \"rescore\": {" + + " \"window_size\": 50,\n" + + " \"query\": {\n" + + " \"rescore_query\" : {\n" + + " \"match\": { \"content\": { \"query\": \"baz\" } }\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), + aggParsers, suggesters); + assertEquals(1, searchSourceBuilder.rescores().size()); + assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50), + searchSourceBuilder.rescores().get(0)); + } + } + + { + String restContent = "{\n" + + " \"query\" : {\n" + + " \"match\": { \"content\": { \"query\": \"foo bar\" }}\n" + + " },\n" + + " \"rescore\": [ {" + + " \"window_size\": 50,\n" + + " \"query\": {\n" + + " \"rescore_query\" : {\n" + + " \"match\": { \"content\": { \"query\": \"baz\" } }\n" + + " }\n" + + " }\n" + + " } ]\n" + + 
"}\n"; + try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), + aggParsers, suggesters); + assertEquals(1, searchSourceBuilder.rescores().size()); + assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50), + searchSourceBuilder.rescores().get(0)); + } + } + } + public void testEmptyPostFilter() throws IOException { SearchSourceBuilder builder = new SearchSourceBuilder(); builder.postFilter(new EmptyQueryBuilder()); From d7a31c8cf7f411aadc7202a4a2326679ed820e88 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 18 May 2016 15:19:30 +0200 Subject: [PATCH 09/36] Add missing builder.endObject() in FsInfo closes #18433 --- core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java | 2 ++ .../rest-api-spec/test/nodes.stats/30_discovery.yaml | 4 ---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java index caa97ea7387..641dc3a5bb3 100644 --- a/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java +++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java @@ -396,12 +396,14 @@ public class FsInfo implements Iterable, Writeable, ToXContent { builder.endObject(); } builder.endArray(); + builder.startObject("total"); builder.field(OPERATIONS, totalOperations); builder.field(READ_OPERATIONS, totalReadOperations); builder.field(WRITE_OPERATIONS, totalWriteOperations); builder.field(READ_KILOBYTES, totalReadKilobytes); builder.field(WRITE_KILOBYTES, totalWriteKilobytes); + builder.endObject(); } return builder; } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/30_discovery.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/30_discovery.yaml index 4769465eb1d..2617f76941c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/30_discovery.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/30_discovery.yaml @@ -1,9 +1,5 @@ --- "Discovery stats": - - skip: - version: "5.0.0 - " - reason: Tracked in issue 18433 - - do: cluster.state: {} From c20a669c2d9e4d1e85c2117011abdc5731164c4d Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 15:20:21 +0200 Subject: [PATCH 10/36] Add CONSOLE to source filtering docs --- docs/reference/search/request/source-filtering.asciidoc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/reference/search/request/source-filtering.asciidoc b/docs/reference/search/request/source-filtering.asciidoc index 8458d37806c..08625751eec 100644 --- a/docs/reference/search/request/source-filtering.asciidoc +++ b/docs/reference/search/request/source-filtering.asciidoc @@ -13,6 +13,7 @@ To disable `_source` retrieval set to `false`: [source,js] -------------------------------------------------- +GET /_search { "_source": false, "query" : { @@ -20,6 +21,7 @@ To disable `_source` retrieval set to `false`: } } -------------------------------------------------- +// CONSOLE The `_source` also accepts one or more wildcard patterns to control what parts of the `_source` should be returned: @@ -27,6 +29,7 @@ For example: [source,js] -------------------------------------------------- +GET /_search { "_source": "obj.*", "query" : { @@ -34,11 +37,13 @@ For example: } } -------------------------------------------------- +// CONSOLE Or [source,js] 
-------------------------------------------------- +GET /_search { "_source": [ "obj1.*", "obj2.*" ], "query" : { @@ -46,11 +51,13 @@ Or } } -------------------------------------------------- +// CONSOLE Finally, for complete control, you can specify both include and exclude patterns: [source,js] -------------------------------------------------- +GET /_search { "_source": { "include": [ "obj1.*", "obj2.*" ], @@ -61,3 +68,4 @@ Finally, for complete control, you can specify both include and exclude patterns } } -------------------------------------------------- +// CONSOLE From cad0608cdb28e2b8485e5c01c26579a35cb84356 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 18 May 2016 09:31:28 -0400 Subject: [PATCH 11/36] Add GC overhead logging This commit adds simple GC overhead logging. This logging captures intervals where the JVM is spending a lot of time performing GC but it is not necessarily the case that each GC is large. For a start, this logging is simple and does not attempt to incorporate whether or not the collections were efficient (that is, we are only capturing that a lot of GC is happening, not that a lot of useless GC is happening). Relates #18419 --- .../common/settings/ClusterSettings.java | 3 + .../common/settings/Setting.java | 30 ++-- .../monitor/jvm/JvmGcMonitorService.java | 154 +++++++++++++++-- .../common/settings/SettingTests.java | 2 +- .../jvm/JvmGcMonitorServiceSettingsTests.java | 40 +++++ .../monitor/jvm/JvmGcMonitorServiceTests.java | 39 +++++ .../monitor/jvm/JvmMonitorTests.java | 156 ++++++++++++++++-- 7 files changed, 384 insertions(+), 40 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index dad5f48ce27..e66534a4feb 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -397,6 +397,9 @@ public final class ClusterSettings extends AbstractScopedSettings { JvmGcMonitorService.ENABLED_SETTING, JvmGcMonitorService.REFRESH_INTERVAL_SETTING, JvmGcMonitorService.GC_SETTING, + JvmGcMonitorService.GC_OVERHEAD_WARN_SETTING, + JvmGcMonitorService.GC_OVERHEAD_INFO_SETTING, + JvmGcMonitorService.GC_OVERHEAD_DEBUG_SETTING, PageCacheRecycler.LIMIT_HEAP_SETTING, PageCacheRecycler.WEIGHT_BYTES_SETTING, PageCacheRecycler.WEIGHT_INT_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index edd5d511f86..1efb65c18b1 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -18,19 +18,6 @@ */ package org.elasticsearch.common.settings; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.EnumSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.ToXContentToBytes; @@ -50,6 +37,19 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; 
+import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + /** * A setting. Encapsulates typical stuff like default value, parsing, and scope. * Some (SettingsProperty.Dynamic) can by modified at run time using the API. @@ -504,7 +504,7 @@ public class Setting extends ToXContentToBytes { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } if (value > maxValue) { - throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue); + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be <= " + maxValue); } return value; } @@ -572,7 +572,7 @@ public class Setting extends ToXContentToBytes { throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); } if (value.bytes() > maxValue.bytes()) { - throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue); + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be <= " + maxValue); } return value; } diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java index 01b4c68537e..ac75eb93aaa 100644 --- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java +++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java @@ -31,6 +31,7 @@ import org.elasticsearch.monitor.jvm.JvmStats.GarbageCollector; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.concurrent.ScheduledFuture; @@ -45,6 +46,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent gcThresholds; + private final GcOverheadThreshold gcOverheadThreshold; private volatile ScheduledFuture scheduledFuture; @@ -57,6 +59,27 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, Property.NodeScope); + public final static Setting GC_OVERHEAD_WARN_SETTING = + Setting.intSetting("monitor.jvm.gc.overhead.warn", 50, 0, 100, Property.NodeScope); + public final static Setting GC_OVERHEAD_INFO_SETTING = + Setting.intSetting("monitor.jvm.gc.overhead.info", 25, 0, 100, Property.NodeScope); + public final static Setting GC_OVERHEAD_DEBUG_SETTING = + Setting.intSetting("monitor.jvm.gc.overhead.debug", 10, 0, 100, Property.NodeScope); + + static class GcOverheadThreshold { + final int warnThreshold; + final int infoThreshold; + final int debugThreshold; + + public GcOverheadThreshold(final int warnThreshold, final int infoThreshold, final int debugThreshold) { + this.warnThreshold = warnThreshold; + this.infoThreshold = infoThreshold; + this.debugThreshold = debugThreshold; + } + } + + + static class GcThreshold { public final String name; public final long warnThreshold; @@ -102,7 +125,42 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent[{}]/[{}], all_pools {}"; - @Override protected void doStart() { if 
(!enabled) { return; } - scheduledFuture = threadPool.scheduleWithFixedDelay(new JvmMonitor(gcThresholds) { + scheduledFuture = threadPool.scheduleWithFixedDelay(new JvmMonitor(gcThresholds, gcOverheadThreshold) { @Override void onMonitorFailure(Throwable t) { logger.debug("failed to monitor", t); @@ -138,9 +193,17 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent[{}]/[{}], all_pools {}"; + static void logSlowGc( final ESLogger logger, final JvmMonitor.Threshold threshold, @@ -162,7 +225,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent gcThresholds; + private final Map gcThresholds; + final GcOverheadThreshold gcOverheadThreshold; - public JvmMonitor(Map gcThresholds) { + public JvmMonitor(final Map gcThresholds, final GcOverheadThreshold gcOverheadThreshold) { this.gcThresholds = Objects.requireNonNull(gcThresholds); + this.gcOverheadThreshold = Objects.requireNonNull(gcOverheadThreshold); } @Override public void run() { try { - monitorLongGc(); + monitorGc(); } catch (Throwable t) { onMonitorFailure(t); } @@ -304,12 +396,21 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent= gcOverheadThreshold.warnThreshold) { + overheadThreshold = Threshold.WARN; + } else if (fraction >= gcOverheadThreshold.infoThreshold) { + overheadThreshold = Threshold.INFO; + } else if (fraction >= gcOverheadThreshold.debugThreshold) { + overheadThreshold = Threshold.DEBUG; + } + if (overheadThreshold != null) { + onGcOverhead(overheadThreshold, current, elapsed, seq); + } } JvmStats jvmStats() { @@ -364,6 +488,8 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent null, t -> { + assertThat(t, instanceOf(IllegalArgumentException.class)); + assertThat(t.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be >= 0")); + }, true, null); + } + + for (final String threshold : new String[] { "warn", "info", "debug" }) { + final Settings.Builder builder = Settings.builder(); + builder.put("monitor.jvm.gc.overhead." + threshold, randomIntBetween(100 + 1, Integer.MAX_VALUE)); + execute(builder.build(), (command, interval) -> null, t -> { + assertThat(t, instanceOf(IllegalArgumentException.class)); + assertThat(t.getMessage(), containsString("setting [monitor.jvm.gc.overhead." 
+ threshold + "] must be <= 100")); + }, true, null); + } + + final Settings.Builder infoWarnOutOfOrderBuilder = Settings.builder(); + final int info = randomIntBetween(2, 98); + infoWarnOutOfOrderBuilder.put("monitor.jvm.gc.overhead.info", info); + final int warn = randomIntBetween(1, info - 1); + infoWarnOutOfOrderBuilder.put("monitor.jvm.gc.overhead.warn", warn); + execute(infoWarnOutOfOrderBuilder.build(), (command, interval) -> null, t -> { + assertThat(t, instanceOf(IllegalArgumentException.class)); + assertThat(t.getMessage(), containsString("[monitor.jvm.gc.overhead.warn] must be greater than [monitor.jvm.gc.overhead.info] [" + info + "] but was [" + warn + "]")); + }, true, null); + + final Settings.Builder debugInfoOutOfOrderBuilder = Settings.builder(); + debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.info", info); + final int debug = randomIntBetween(info + 1, 99); + debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.debug", debug); + debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.warn", randomIntBetween(debug + 1, 100)); // or the test will fail for the wrong reason + execute(debugInfoOutOfOrderBuilder.build(), (command, interval) -> null, t -> { + assertThat(t, instanceOf(IllegalArgumentException.class)); + assertThat(t.getMessage(), containsString("[monitor.jvm.gc.overhead.info] must be greater than [monitor.jvm.gc.overhead.debug] [" + debug + "] but was [" + info + "]")); + }, true, null); + } + private static void execute(Settings settings, BiFunction> scheduler, Runnable asserts) throws InterruptedException { execute(settings, scheduler, null, false, asserts); } diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java index 2c17fca7c8c..ab5b1ac4750 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java @@ -133,4 +133,43 @@ public class JvmGcMonitorServiceTests extends ESTestCase { verifyNoMoreInteractions(logger); } + public void testGcOverheadLogging() { + final JvmGcMonitorService.JvmMonitor.Threshold threshold = randomFrom(JvmGcMonitorService.JvmMonitor.Threshold.values()); + final int current = randomIntBetween(1, Integer.MAX_VALUE); + final long elapsed = randomIntBetween(current, Integer.MAX_VALUE); + final long seq = randomIntBetween(1, Integer.MAX_VALUE); + final ESLogger logger = mock(ESLogger.class); + when(logger.isWarnEnabled()).thenReturn(true); + when(logger.isInfoEnabled()).thenReturn(true); + when(logger.isDebugEnabled()).thenReturn(true); + JvmGcMonitorService.logGcOverhead(logger, threshold, current, elapsed, seq); + switch(threshold) { + case WARN: + verify(logger).isWarnEnabled(); + verify(logger).warn( + "[gc][{}] overhead, spent [{}] collecting in the last [{}]", + seq, + TimeValue.timeValueMillis(current), + TimeValue.timeValueMillis(elapsed)); + break; + case INFO: + verify(logger).isInfoEnabled(); + verify(logger).info( + "[gc][{}] overhead, spent [{}] collecting in the last [{}]", + seq, + TimeValue.timeValueMillis(current), + TimeValue.timeValueMillis(elapsed)); + break; + case DEBUG: + verify(logger).isDebugEnabled(); + verify(logger).debug( + "[gc][{}] overhead, spent [{}] collecting in the last [{}]", + seq, + TimeValue.timeValueMillis(current), + TimeValue.timeValueMillis(elapsed)); + break; + } + verifyNoMoreInteractions(logger); + } + } diff --git 
a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java index 8d3ddeec84e..91862e9cd18 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java @@ -41,10 +41,12 @@ import static org.mockito.Mockito.when; public class JvmMonitorTests extends ESTestCase { + private static final JvmGcMonitorService.GcOverheadThreshold IGNORE = new JvmGcMonitorService.GcOverheadThreshold(0, 0, 0); + public void testMonitorFailure() { AtomicBoolean shouldFail = new AtomicBoolean(); AtomicBoolean invoked = new AtomicBoolean(); - JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap()) { + JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), IGNORE) { @Override void onMonitorFailure(Throwable t) { invoked.set(true); @@ -53,7 +55,7 @@ public class JvmMonitorTests extends ESTestCase { } @Override - synchronized void monitorLongGc() { + synchronized void monitorGc() { if (shouldFail.get()) { throw new RuntimeException("simulated"); } @@ -62,6 +64,10 @@ public class JvmMonitorTests extends ESTestCase { @Override void onSlowGc(final Threshold threshold, final long seq, final SlowGcEvent slowGcEvent) { } + + @Override + void onGcOverhead(Threshold threshold, long total, long elapsed, long seq) { + } }; monitor.run(); @@ -166,7 +172,7 @@ public class JvmMonitorTests extends ESTestCase { final AtomicInteger count = new AtomicInteger(); - JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(gcThresholds) { + JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(gcThresholds, IGNORE) { @Override void onMonitorFailure(Throwable t) { } @@ -198,6 +204,10 @@ public class JvmMonitorTests extends ESTestCase { } } + @Override + void onGcOverhead(Threshold threshold, long total, long elapsed, long seq) { + } + @Override long now() { return now.get(); @@ -213,7 +223,7 @@ public class JvmMonitorTests extends ESTestCase { now.set(start + TimeUnit.NANOSECONDS.convert(expectedElapsed, TimeUnit.MILLISECONDS)); jvmStats.set(monitorJvmStats); - monitor.monitorLongGc(); + monitor.monitorGc(); assertThat(count.get(), equalTo((youngGcThreshold ? 1 : 0) + (oldGcThreshold ? 
1 : 0))); } @@ -235,14 +245,140 @@ public class JvmMonitorTests extends ESTestCase { private JvmStats jvmStats(JvmStats.GarbageCollector youngCollector, JvmStats.GarbageCollector oldCollector) { final JvmStats jvmStats = mock(JvmStats.class); - final JvmStats.GarbageCollectors initialGcs = mock(JvmStats.GarbageCollectors.class); - final JvmStats.GarbageCollector[] initialCollectors = new JvmStats.GarbageCollector[2]; - initialCollectors[0] = youngCollector; - initialCollectors[1] = oldCollector; - when(initialGcs.getCollectors()).thenReturn(initialCollectors); - when(jvmStats.getGc()).thenReturn(initialGcs); + final JvmStats.GarbageCollectors gcs = mock(JvmStats.GarbageCollectors.class); + final JvmStats.GarbageCollector[] collectors = new JvmStats.GarbageCollector[2]; + collectors[0] = youngCollector; + collectors[1] = oldCollector; + when(gcs.getCollectors()).thenReturn(collectors); + when(jvmStats.getGc()).thenReturn(gcs); when(jvmStats.getMem()).thenReturn(JvmStats.jvmStats().getMem()); return jvmStats; } + public void testMonitorGc() { + final int youngCollectionCount = randomIntBetween(1, 16); + final int youngCollectionIncrement = randomIntBetween(1, 16); + final int youngCollectionTime = randomIntBetween(1, 1 << 10); + final int youngCollectionTimeIncrement = randomIntBetween(1, 1 << 10); + final int oldCollectionCount = randomIntBetween(1, 16); + final int oldCollectionIncrement = randomIntBetween(1, 16); + final int oldCollectionTime = randomIntBetween(1, 1 << 10); + final int oldCollectionTimeIncrement = randomIntBetween(1, 1 << 10); + + final JvmStats.GarbageCollector lastYoungCollector = collector("young", youngCollectionCount, youngCollectionTime); + final JvmStats.GarbageCollector lastOldCollector = collector("old", oldCollectionCount, oldCollectionTime); + final JvmStats lastjvmStats = jvmStats(lastYoungCollector, lastOldCollector); + + final JvmStats.GarbageCollector currentYoungCollector = + collector("young", youngCollectionCount + youngCollectionIncrement, youngCollectionTime + youngCollectionTimeIncrement); + final JvmStats.GarbageCollector currentOldCollector = + collector("old", oldCollectionCount + oldCollectionIncrement, oldCollectionTime + oldCollectionTimeIncrement); + final JvmStats currentJvmStats = jvmStats(currentYoungCollector, currentOldCollector); + final long expectedElapsed = + randomIntBetween( + Math.max(youngCollectionTime + youngCollectionTimeIncrement, oldCollectionTime + oldCollectionTimeIncrement), + Integer.MAX_VALUE); + + final AtomicBoolean invoked = new AtomicBoolean(); + + final JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), IGNORE) { + + @Override + void onMonitorFailure(Throwable t) { + } + + @Override + void onSlowGc(Threshold threshold, long seq, SlowGcEvent slowGcEvent) { + } + + @Override + void onGcOverhead(Threshold threshold, long total, long elapsed, long seq) { + } + + @Override + void checkGcOverhead(long current, long elapsed, long seq) { + invoked.set(true); + assertThat(current, equalTo((long)(youngCollectionTimeIncrement + oldCollectionTimeIncrement))); + assertThat(elapsed, equalTo(expectedElapsed)); + } + + @Override + JvmStats jvmStats() { + return lastjvmStats; + } + }; + + monitor.monitorGcOverhead(currentJvmStats, expectedElapsed); + assertTrue(invoked.get()); + } + + private JvmStats.GarbageCollector collector(final String name, final int collectionCount, final int collectionTime) { + final JvmStats.GarbageCollector gc = mock(JvmStats.GarbageCollector.class); + 
when(gc.getName()).thenReturn(name); + when(gc.getCollectionCount()).thenReturn((long)collectionCount); + when(gc.getCollectionTime()).thenReturn(TimeValue.timeValueMillis(collectionTime)); + return gc; + } + + public void testCheckGcOverhead() { + final int debugThreshold = randomIntBetween(1, 98); + final int infoThreshold = randomIntBetween(debugThreshold + 1, 99); + final int warnThreshold = randomIntBetween(infoThreshold + 1, 100); + final JvmGcMonitorService.GcOverheadThreshold gcOverheadThreshold = + new JvmGcMonitorService.GcOverheadThreshold(warnThreshold, infoThreshold, debugThreshold); + + final JvmGcMonitorService.JvmMonitor.Threshold expectedThreshold; + int fraction = 0; + final long expectedCurrent; + final long expectedElapsed; + if (randomBoolean()) { + expectedThreshold = randomFrom(JvmGcMonitorService.JvmMonitor.Threshold.values()); + switch (expectedThreshold) { + case WARN: + fraction = randomIntBetween(warnThreshold, 100); + break; + case INFO: + fraction = randomIntBetween(infoThreshold, warnThreshold - 1); + break; + case DEBUG: + fraction = randomIntBetween(debugThreshold, infoThreshold - 1); + break; + } + } else { + expectedThreshold = null; + fraction = randomIntBetween(0, debugThreshold - 1); + } + + expectedElapsed = 100 * randomIntBetween(1, 1000); + expectedCurrent = fraction * expectedElapsed / 100; + + final AtomicBoolean invoked = new AtomicBoolean(); + final long expectedSeq = randomIntBetween(1, Integer.MAX_VALUE); + + final JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), gcOverheadThreshold) { + + @Override + void onMonitorFailure(final Throwable t) { + } + + @Override + void onSlowGc(Threshold threshold, long seq, SlowGcEvent slowGcEvent) { + } + + @Override + void onGcOverhead(final Threshold threshold, final long current, final long elapsed, final long seq) { + invoked.set(true); + assertThat(threshold, equalTo(expectedThreshold)); + assertThat(current, equalTo(expectedCurrent)); + assertThat(elapsed, equalTo(expectedElapsed)); + assertThat(seq, equalTo(expectedSeq)); + } + + }; + + monitor.checkGcOverhead(expectedCurrent, expectedElapsed, expectedSeq); + + assertThat(invoked.get(), equalTo(expectedThreshold != null)); + } + } From a5268cd40daa74b67045c24888d49d7602fe8ee4 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Wed, 18 May 2016 15:32:48 +0200 Subject: [PATCH 12/36] Add CONSOLE to version docs --- docs/reference/search/request/version.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/search/request/version.asciidoc b/docs/reference/search/request/version.asciidoc index 3b2329a828a..57c6ce27feb 100644 --- a/docs/reference/search/request/version.asciidoc +++ b/docs/reference/search/request/version.asciidoc @@ -5,6 +5,7 @@ Returns a version for each search hit. [source,js] -------------------------------------------------- +GET /_search { "version": true, "query" : { @@ -12,3 +13,4 @@ Returns a version for each search hit. } } -------------------------------------------------- +// CONSOLE From db4809d9061e6c4158d73eab1e7a9c82aec6521b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 18 May 2016 11:03:00 -0400 Subject: [PATCH 13/36] Remove last vestigates of /bin/sh shebangs This commit removes the remaining /bin/sh shebangs in favor of /bin/bash. 
Relates #18448 --- buildSrc/src/main/resources/deb/postinst.ftl | 2 +- buildSrc/src/main/resources/deb/preinst.ftl | 2 +- .../src/main/resources/bin/elasticsearch-systemd-pre-exec | 2 +- distribution/src/main/resources/bin/elasticsearch.in.sh | 2 +- plugins/jvm-example/src/main/bin/test | 2 +- qa/vagrant/src/test/resources/packaging/scripts/modules.bash | 2 +- qa/vagrant/src/test/resources/packaging/scripts/os_package.bash | 2 +- .../test/resources/packaging/scripts/packaging_test_utils.bash | 2 +- qa/vagrant/src/test/resources/packaging/scripts/plugins.bash | 2 +- qa/vagrant/src/test/resources/packaging/scripts/tar.bash | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/buildSrc/src/main/resources/deb/postinst.ftl b/buildSrc/src/main/resources/deb/postinst.ftl index 5f67242c265..9acfc0f084e 100644 --- a/buildSrc/src/main/resources/deb/postinst.ftl +++ b/buildSrc/src/main/resources/deb/postinst.ftl @@ -1,2 +1,2 @@ -#!/bin/sh -e +#!/bin/bash -e <% commands.each {command -> %><%= command %><% } %> diff --git a/buildSrc/src/main/resources/deb/preinst.ftl b/buildSrc/src/main/resources/deb/preinst.ftl index 5f67242c265..9acfc0f084e 100644 --- a/buildSrc/src/main/resources/deb/preinst.ftl +++ b/buildSrc/src/main/resources/deb/preinst.ftl @@ -1,2 +1,2 @@ -#!/bin/sh -e +#!/bin/bash -e <% commands.each {command -> %><%= command %><% } %> diff --git a/distribution/src/main/resources/bin/elasticsearch-systemd-pre-exec b/distribution/src/main/resources/bin/elasticsearch-systemd-pre-exec index a51d639bf7d..5a5877598e6 100755 --- a/distribution/src/main/resources/bin/elasticsearch-systemd-pre-exec +++ b/distribution/src/main/resources/bin/elasticsearch-systemd-pre-exec @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # CONF_FILE setting was removed if [ ! -z "$CONF_FILE" ]; then diff --git a/distribution/src/main/resources/bin/elasticsearch.in.sh b/distribution/src/main/resources/bin/elasticsearch.in.sh index 8f1b5566f90..58b26a2d6eb 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.sh +++ b/distribution/src/main/resources/bin/elasticsearch.in.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # check in case a user was using this mechanism if [ "x$ES_CLASSPATH" != "x" ]; then diff --git a/plugins/jvm-example/src/main/bin/test b/plugins/jvm-example/src/main/bin/test index 76ba88943ac..13fdcce1e52 100755 --- a/plugins/jvm-example/src/main/bin/test +++ b/plugins/jvm-example/src/main/bin/test @@ -1,3 +1,3 @@ -#!/bin/sh +#!/bin/bash echo test diff --git a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash index bd6da680da9..2e80fd648f3 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/modules.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/modules.bash @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # This file contains some utilities to test the elasticsearch scripts, # the .deb/.rpm packages and the SysV/Systemd scripts. diff --git a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash index 72c59c39324..ee6e491d169 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/os_package.bash @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # This file contains some utilities to test the elasticsearch scripts with # the .deb/.rpm packages. 
diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 09d0190695e..5f50dfc2850 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # This file contains some utilities to test the elasticsearch scripts, # the .deb/.rpm packages and the SysV/Systemd scripts. diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash index 4f1e574b905..afae7439057 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugins.bash @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # This file contains some utilities to test the elasticsearch scripts, # the .deb/.rpm packages and the SysV/Systemd scripts. diff --git a/qa/vagrant/src/test/resources/packaging/scripts/tar.bash b/qa/vagrant/src/test/resources/packaging/scripts/tar.bash index 277eee60f1a..798ec6c2997 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/tar.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/tar.bash @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # This file contains some utilities to test the elasticsearch scripts, # the .deb/.rpm packages and the SysV/Systemd scripts. From a846ff93e91020b5a837c45a045191b1c58a86e7 Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 17 May 2016 14:15:53 +0100 Subject: [PATCH 14/36] Aggregations fix: support include/exclude strings formatted for IP and date fields in terms and significant_terms aggregations. Closes #17705 --- .../SignificantTermsAggregatorFactory.java | 17 +++++-- .../bucket/terms/TermsAggregatorFactory.java | 15 ++++-- .../bucket/terms/support/IncludeExclude.java | 40 ++++++++++----- .../test/search.aggregation/20_terms.yaml | 50 +++++++++++++++++++ .../test/search.aggregation/30_sig_terms.yaml | 25 ++++++++++ 5 files changed, 125 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 13126029b8e..4b9e3acb873 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -211,7 +211,14 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac } } assert execution != null; - return execution.create(name, factories, valuesSource, config.format(), bucketCountThresholds, includeExclude, context, parent, + + DocValueFormat format = config.format(); + if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) { + throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude " + + "settings as they can only be applied to string fields. 
Use an array of values for include/exclude clauses"); + } + + return execution.create(name, factories, valuesSource, format, bucketCountThresholds, includeExclude, context, parent, significanceHeuristic, this, pipelineAggregators, metaData); } @@ -227,7 +234,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac } IncludeExclude.LongFilter longFilter = null; if (includeExclude != null) { - longFilter = includeExclude.convertToLongFilter(); + longFilter = includeExclude.convertToLongFilter(config.format()); } return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), bucketCountThresholds, context, parent, significanceHeuristic, this, longFilter, pipelineAggregators, @@ -248,7 +255,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, Map metaData) throws IOException { - final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(); + final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); return new SignificantStringTermsAggregator(name, factories, valuesSource, format, bucketCountThresholds, filter, aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); } @@ -262,7 +269,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, Map metaData) throws IOException { - final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format); return new GlobalOrdinalsSignificantTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter, aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); @@ -277,7 +284,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, Map metaData) throws IOException { - final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? 
null : includeExclude.convertToOrdinalsFilter(format); return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter, aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 1ccf4a11570..62374ae7d19 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -150,8 +150,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(); + final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); return new StringTermsAggregator(name, factories, valuesSource, order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); } @@ -211,7 +216,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format); return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); @@ -231,7 +236,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? 
null : includeExclude.convertToOrdinalsFilter(format); return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java index 101291d01e1..209700b86d9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.WithOrdinals; @@ -135,7 +136,8 @@ public class IncludeExclude implements Writeable, ToXContent { } public static abstract class OrdinalsFilter { - public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException; + public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) + throws IOException; } @@ -152,7 +154,8 @@ public class IncludeExclude implements Writeable, ToXContent { * */ @Override - public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException { + public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) + throws IOException { LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount()); TermsEnum globalTermsEnum; Terms globalTerms = new DocValuesTerms(globalOrdinals); @@ -179,7 +182,7 @@ public class IncludeExclude implements Writeable, ToXContent { @Override public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, WithOrdinals valueSource) throws IOException { LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount()); - if(includeValues!=null){ + if (includeValues != null) { for (BytesRef term : includeValues) { long ord = globalOrdinals.lookupTerm(term); if (ord >= 0) { @@ -534,33 +537,46 @@ public class IncludeExclude implements Writeable, ToXContent { return a; } - public StringFilter convertToStringFilter() { + public StringFilter convertToStringFilter(DocValueFormat format) { if (isRegexBased()) { return new AutomatonBackedStringFilter(toAutomaton()); } - return new TermListBackedStringFilter(includeValues, excludeValues); + return new TermListBackedStringFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format)); } - public OrdinalsFilter convertToOrdinalsFilter() { + private static SortedSet parseForDocValues(SortedSet endUserFormattedValues, DocValueFormat format) { + SortedSet result = endUserFormattedValues; + if (endUserFormattedValues != null) { + if (format != DocValueFormat.RAW) { + result = new TreeSet<>(); + for (BytesRef formattedVal : endUserFormattedValues) { + 
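+ // each include/exclude term arrives in its end-user (formatted) form, e.g. "127.0.0.1" or "2016-05-03";
+ // parse it back through the field's DocValueFormat so it matches the internal BytesRef representation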
result.add(format.parseBytesRef(formattedVal.utf8ToString())); + } + } + } + return result; + } + + public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { if (isRegexBased()) { return new AutomatonBackedOrdinalsFilter(toAutomaton()); } - return new TermListBackedOrdinalsFilter(includeValues, excludeValues); + return new TermListBackedOrdinalsFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format)); } - public LongFilter convertToLongFilter() { + public LongFilter convertToLongFilter(DocValueFormat format) { int numValids = includeValues == null ? 0 : includeValues.size(); int numInvalids = excludeValues == null ? 0 : excludeValues.size(); LongFilter result = new LongFilter(numValids, numInvalids); if (includeValues != null) { for (BytesRef val : includeValues) { - result.addAccept(Long.parseLong(val.utf8ToString())); + result.addAccept(format.parseLong(val.utf8ToString(), false, null)); } } if (excludeValues != null) { for (BytesRef val : excludeValues) { - result.addReject(Long.parseLong(val.utf8ToString())); + result.addReject(format.parseLong(val.utf8ToString(), false, null)); } } return result; @@ -572,13 +588,13 @@ public class IncludeExclude implements Writeable, ToXContent { LongFilter result = new LongFilter(numValids, numInvalids); if (includeValues != null) { for (BytesRef val : includeValues) { - double dval=Double.parseDouble(val.utf8ToString()); + double dval = Double.parseDouble(val.utf8ToString()); result.addAccept(NumericUtils.doubleToSortableLong(dval)); } } if (excludeValues != null) { for (BytesRef val : excludeValues) { - double dval=Double.parseDouble(val.utf8ToString()); + double dval = Double.parseDouble(val.utf8ToString()); result.addReject(NumericUtils.doubleToSortableLong(dval)); } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml index 71d1a1e7ca2..c35e79e6cfe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml @@ -117,6 +117,33 @@ setup: - match: { aggregations.ip_terms.buckets.1.doc_count: 1 } + - do: + search: + body: { "size" : 0, "aggs" : { "ip_terms" : { "terms" : { "field" : "ip", "include" : [ "127.0.0.1" ] } } } } + + - match: { hits.total: 3 } + + - length: { aggregations.ip_terms.buckets: 1 } + + - match: { aggregations.ip_terms.buckets.0.key: "127.0.0.1" } + + - do: + search: + body: { "size" : 0, "aggs" : { "ip_terms" : { "terms" : { "field" : "ip", "exclude" : [ "127.0.0.1" ] } } } } + + - match: { hits.total: 3 } + + - length: { aggregations.ip_terms.buckets: 1 } + + - match: { aggregations.ip_terms.buckets.0.key: "::1" } + + - do: + catch: request + search: + body: { "size" : 0, "aggs" : { "ip_terms" : { "terms" : { "field" : "ip", "exclude" : "127.*" } } } } + + + --- "Boolean test": - do: @@ -300,4 +327,27 @@ setup: - match: { aggregations.date_terms.buckets.1.key_as_string: "2014-09-01T00:00:00.000Z" } - match: { aggregations.date_terms.buckets.1.doc_count: 1 } + + - do: + search: + body: { "size" : 0, "aggs" : { "date_terms" : { "terms" : { "field" : "date", "include" : [ "2016-05-03" ] } } } } + - match: { hits.total: 3 } + + - length: { aggregations.date_terms.buckets: 1 } + + - match: { aggregations.date_terms.buckets.0.key_as_string: "2016-05-03T00:00:00.000Z" } + + - match: { aggregations.date_terms.buckets.0.doc_count: 
2 } + + - do: + search: + body: { "size" : 0, "aggs" : { "date_terms" : { "terms" : { "field" : "date", "exclude" : [ "2016-05-03" ] } } } } + + - match: { hits.total: 3 } + + - length: { aggregations.date_terms.buckets: 1 } + + - match: { aggregations.date_terms.buckets.0.key_as_string: "2014-09-01T00:00:00.000Z" } + + - match: { aggregations.date_terms.buckets.0.doc_count: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yaml index 45c042baea4..a708ff19d7e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yaml @@ -121,3 +121,28 @@ - is_false: aggregations.ip_terms.buckets.0.key_as_string - match: { aggregations.ip_terms.buckets.0.doc_count: 1 } + + - do: + search: + body: { "query" : { "exists" : { "field" : "ip" } }, "aggs" : { "ip_terms" : { "significant_terms" : { "field" : "ip", "min_doc_count" : 1, "include" : [ "::1" ] } } } } + + - match: { hits.total: 1 } + + - length: { aggregations.ip_terms.buckets: 1 } + + - match: { aggregations.ip_terms.buckets.0.key: "::1" } + + - do: + search: + body: { "query" : { "exists" : { "field" : "ip" } }, "aggs" : { "ip_terms" : { "significant_terms" : { "field" : "ip", "min_doc_count" : 1, "exclude" : [ "::1" ] } } } } + + - match: { hits.total: 1 } + + - length: { aggregations.ip_terms.buckets: 0 } + + - do: + catch: request + search: + body: { "size" : 0, "aggs" : { "ip_terms" : { "significant_terms" : { "field" : "ip", "exclude" : "127.*" } } } } + + From 7c665a010b12152fef387fac5c1ad710dc467f39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Sun, 15 May 2016 21:26:09 +0200 Subject: [PATCH 15/36] Fix TimeZoneRounding#nextRoundingValue for hour, minute and second units Currently rounding intervals obtained by nextRoundingValue() for hour, minute and second units can include an extra hour when happening at DST transitions that add an extra hour (eg CEST -> CET). This changes the rounding logic for time units smaller or equal to an hour to fix this. 
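As a rough, standalone illustration of the behaviour described above (not part of this change), the following Joda-Time sketch compares the old convert-to-local-then-add logic with simply adding the unit on the UTC timeline at the 2014 CEST -> CET fall-back transition; the class name and main wrapper are made up for the example:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class DstRoundingSketch {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forID("CET");
        // 2014-10-26T00:00Z is 02:00 local (CEST, UTC+2); one hour later clocks fall back to CET (UTC+1)
        long utc = new DateTime(2014, 10, 26, 0, 0, DateTimeZone.UTC).getMillis();
        long hour = 3600 * 1000L;

        // old behaviour: convert to local time, add one hour, convert back to UTC
        long oldNext = tz.convertLocalToUTC(tz.convertUTCToLocal(utc) + hour, false);
        // new behaviour for hour/minute/second units: add the unit directly to the UTC instant
        long newNext = utc + hour;

        System.out.println("old delta: " + (oldNext - utc) / hour + "h"); // prints 2h across the transition
        System.out.println("new delta: " + (newNext - utc) / hour + "h"); // prints 1h, equally spaced
    }
}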
Closes #18326 --- .../common/rounding/DateTimeUnit.java | 9 +++ .../common/rounding/TimeZoneRounding.java | 25 ++++--- .../common/rounding/DateTimeUnitTests.java | 75 +++++++++++++++++++ .../rounding/TimeZoneRoundingTests.java | 58 ++++++++++++-- .../aggregations/bucket/DateHistogramIT.java | 24 ++++++ 5 files changed, 173 insertions(+), 18 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java diff --git a/core/src/main/java/org/elasticsearch/common/rounding/DateTimeUnit.java b/core/src/main/java/org/elasticsearch/common/rounding/DateTimeUnit.java index e02342ffdca..bd0c5506859 100644 --- a/core/src/main/java/org/elasticsearch/common/rounding/DateTimeUnit.java +++ b/core/src/main/java/org/elasticsearch/common/rounding/DateTimeUnit.java @@ -53,6 +53,15 @@ public enum DateTimeUnit { return field; } + /** + * @param unit the {@link DateTimeUnit} to check + * @return true if the unit is a day or longer + */ + public static boolean isDayOrLonger(DateTimeUnit unit) { + return (unit == DateTimeUnit.HOUR_OF_DAY || unit == DateTimeUnit.MINUTES_OF_HOUR + || unit == DateTimeUnit.SECOND_OF_MINUTE) == false; + } + public static DateTimeUnit resolve(byte id) { switch (id) { case 1: return WEEK_OF_WEEKYEAR; diff --git a/core/src/main/java/org/elasticsearch/common/rounding/TimeZoneRounding.java b/core/src/main/java/org/elasticsearch/common/rounding/TimeZoneRounding.java index 4189e412708..e0ffb89c8b0 100644 --- a/core/src/main/java/org/elasticsearch/common/rounding/TimeZoneRounding.java +++ b/core/src/main/java/org/elasticsearch/common/rounding/TimeZoneRounding.java @@ -46,8 +46,8 @@ public abstract class TimeZoneRounding extends Rounding { public static class Builder { - private DateTimeUnit unit; - private long interval = -1; + private final DateTimeUnit unit; + private final long interval; private DateTimeZone timeZone = DateTimeZone.UTC; @@ -142,10 +142,15 @@ public abstract class TimeZoneRounding extends Rounding { @Override public long nextRoundingValue(long time) { - long timeLocal = time; - timeLocal = timeZone.convertUTCToLocal(time); - long nextInLocalTime = durationField.add(timeLocal, 1); - return timeZone.convertLocalToUTC(nextInLocalTime, false); + if (DateTimeUnit.isDayOrLonger(unit)) { + time = timeZone.convertUTCToLocal(time); + } + long next = durationField.add(time, 1); + if (DateTimeUnit.isDayOrLonger(unit)) { + return timeZone.convertLocalToUTC(next, false); + } else { + return next; + } } @Override @@ -161,12 +166,12 @@ public abstract class TimeZoneRounding extends Rounding { out.writeByte(unit.id()); out.writeString(timeZone.getID()); } - + @Override public int hashCode() { return Objects.hash(unit, timeZone); } - + @Override public boolean equals(Object obj) { if (obj == null) { @@ -236,12 +241,12 @@ public abstract class TimeZoneRounding extends Rounding { out.writeVLong(interval); out.writeString(timeZone.getID()); } - + @Override public int hashCode() { return Objects.hash(interval, timeZone); } - + @Override public boolean equals(Object obj) { if (obj == null) { diff --git a/core/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java b/core/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java new file mode 100644 index 00000000000..79ef6929645 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/rounding/DateTimeUnitTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.rounding; + +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.common.rounding.DateTimeUnit.WEEK_OF_WEEKYEAR; +import static org.elasticsearch.common.rounding.DateTimeUnit.YEAR_OF_CENTURY; +import static org.elasticsearch.common.rounding.DateTimeUnit.QUARTER; +import static org.elasticsearch.common.rounding.DateTimeUnit.MONTH_OF_YEAR; +import static org.elasticsearch.common.rounding.DateTimeUnit.DAY_OF_MONTH; +import static org.elasticsearch.common.rounding.DateTimeUnit.HOUR_OF_DAY; +import static org.elasticsearch.common.rounding.DateTimeUnit.MINUTES_OF_HOUR; +import static org.elasticsearch.common.rounding.DateTimeUnit.SECOND_OF_MINUTE; + +public class DateTimeUnitTests extends ESTestCase { + + /** + * test that we don't accidentally change enum ids + */ + public void testEnumIds() { + assertEquals(1, WEEK_OF_WEEKYEAR.id()); + assertEquals(WEEK_OF_WEEKYEAR, DateTimeUnit.resolve((byte) 1)); + + assertEquals(2, YEAR_OF_CENTURY.id()); + assertEquals(YEAR_OF_CENTURY, DateTimeUnit.resolve((byte) 2)); + + assertEquals(3, QUARTER.id()); + assertEquals(QUARTER, DateTimeUnit.resolve((byte) 3)); + + assertEquals(4, MONTH_OF_YEAR.id()); + assertEquals(MONTH_OF_YEAR, DateTimeUnit.resolve((byte) 4)); + + assertEquals(5, DAY_OF_MONTH.id()); + assertEquals(DAY_OF_MONTH, DateTimeUnit.resolve((byte) 5)); + + assertEquals(6, HOUR_OF_DAY.id()); + assertEquals(HOUR_OF_DAY, DateTimeUnit.resolve((byte) 6)); + + assertEquals(7, MINUTES_OF_HOUR.id()); + assertEquals(MINUTES_OF_HOUR, DateTimeUnit.resolve((byte) 7)); + + assertEquals(8, SECOND_OF_MINUTE.id()); + assertEquals(SECOND_OF_MINUTE, DateTimeUnit.resolve((byte) 8)); + } + + public void testIsDayOrLonger() { + for (DateTimeUnit unit : DateTimeUnit.values()) { + if (DateTimeUnit.isDayOrLonger(unit)) { + assertTrue(unit == DAY_OF_MONTH || + unit == MONTH_OF_YEAR || + unit == QUARTER || + unit == YEAR_OF_CENTURY || + unit == WEEK_OF_WEEKYEAR); + } + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java index 2c4d78adbd0..08a4ba11342 100644 --- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java @@ -25,6 +25,7 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; +import java.util.ArrayList; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; @@ -147,21 +148,37 @@ public class TimeZoneRoundingTests extends ESTestCase { Rounding tzRounding; // testing savings to non savings switch tzRounding = 
TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build(); - assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forID("CET"))), - equalTo(time("2014-10-26T01:00:00", DateTimeZone.forID("CET")))); + assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forOffsetHours(2))), // CEST = UTC+2 + equalTo(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2)))); + assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))), + equalTo(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2)))); + assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))), + equalTo(time("2014-10-26T03:00:00", DateTimeZone.forOffsetHours(2)))); tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build(); - assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forID("CET"))), - equalTo(time("2014-10-26T01:00:00", DateTimeZone.forID("CET")))); + assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forOffsetHours(2))), // CEST = UTC+2 + equalTo(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2)))); + assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))), + equalTo(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2)))); + assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))), + equalTo(time("2014-10-26T03:00:00", DateTimeZone.forOffsetHours(2)))); // testing non savings to savings switch tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build(); - assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forID("CET"))), - equalTo(time("2014-03-30T01:00:00", DateTimeZone.forID("CET")))); + assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forOffsetHours(1))), // CET = UTC+1 + equalTo(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1)))); + assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))), + equalTo(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1)))); + assertThat(tzRounding.nextRoundingValue(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))), + equalTo(time("2014-03-30T03:00:00", DateTimeZone.forOffsetHours(1)))); tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build(); - assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forID("CET"))), - equalTo(time("2014-03-30T01:00:00", DateTimeZone.forID("CET")))); + assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forOffsetHours(1))), // CET = UTC+1 + equalTo(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1)))); + assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))), + equalTo(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1)))); + assertThat(tzRounding.nextRoundingValue(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))), + equalTo(time("2014-03-30T03:00:00", DateTimeZone.forOffsetHours(1)))); // testing non savings to savings switch (America/Chicago) tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build(); @@ -210,6 +227,31 @@ public class TimeZoneRoundingTests extends ESTestCase { } } + /** + * Test that nextRoundingValue() for hour rounding (and smaller) is equally 
spaced (see #18326) + * Start at a random date in a random time zone, then find the next zone offset transition (if any). + * From there, check that when we advance by using rounding#nextRoundingValue(), we always advance by the same + * amount of milliseconds. + */ + public void testSubHourNextRoundingEquallySpaced() { + String timeZone = randomFrom(new ArrayList<>(DateTimeZone.getAvailableIDs())); + DateTimeUnit unit = randomFrom(new DateTimeUnit[] { DateTimeUnit.HOUR_OF_DAY, DateTimeUnit.MINUTES_OF_HOUR, + DateTimeUnit.SECOND_OF_MINUTE }); + DateTimeZone tz = DateTimeZone.forID(timeZone); + TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, tz); + // move the random date to transition for timezones that have offset change due to dst transition + long nextTransition = tz.nextTransition(Math.abs(randomLong() % ((long) 10e11))); + final long millisPerUnit = unit.field().getDurationField().getUnitMillis(); + // start ten units before transition + long roundedDate = rounding.round(nextTransition - (10 * millisPerUnit)); + while (roundedDate < nextTransition + 10 * millisPerUnit) { + long delta = rounding.nextRoundingValue(roundedDate) - roundedDate; + assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timeZone + "] at " + + new DateTime(roundedDate), millisPerUnit, delta); + roundedDate = rounding.nextRoundingValue(roundedDate); + } + } + /** * randomized test on TimeIntervalRounding with random interval and time zone offsets */ diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 76b5558df80..cdb722ff9dd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; @@ -1146,4 +1147,27 @@ public class DateHistogramIT extends ESIntegTestCase { Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), greaterThan(0)); } + + /** + * When DST ends, local time turns back one hour, so between 2am and 4am wall time we should have four buckets: + * "2015-10-25T02:00:00.000+02:00", + * "2015-10-25T02:00:00.000+01:00", + * "2015-10-25T03:00:00.000+01:00", + * "2015-10-25T04:00:00.000+01:00". 
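+ * Consecutive bucket keys must therefore be exactly one hour (3600000 ms) apart, even though 02:00 local time occurs twice.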
+ */ + public void testDSTEndTransition() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .setQuery(new MatchNoneQueryBuilder()) + .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("Europe/Oslo")) + .dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( + new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00"))) + .execute().actionGet(); + + Histogram histo = response.getAggregations().get("histo"); + List buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(4)); + assertThat(((DateTime) buckets.get(1).getKey()).getMillis() - ((DateTime) buckets.get(0).getKey()).getMillis(), equalTo(3600000L)); + assertThat(((DateTime) buckets.get(2).getKey()).getMillis() - ((DateTime) buckets.get(1).getKey()).getMillis(), equalTo(3600000L)); + assertThat(((DateTime) buckets.get(3).getKey()).getMillis() - ((DateTime) buckets.get(2).getKey()).getMillis(), equalTo(3600000L)); + } } From cec9a94b96b194ce35e63c5616493153dc9c19e2 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 18 May 2016 14:48:51 +0200 Subject: [PATCH 16/36] Added version 2.3.3 with bwc indices --- .../main/java/org/elasticsearch/Version.java | 4 ++++ .../test/resources/indices/bwc/index-2.3.3.zip | Bin 0 -> 77612 bytes .../test/resources/indices/bwc/repo-2.3.3.zip | Bin 0 -> 75600 bytes 3 files changed, 4 insertions(+) create mode 100644 core/src/test/resources/indices/bwc/index-2.3.3.zip create mode 100644 core/src/test/resources/indices/bwc/repo-2.3.3.zip diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 0e869d06149..9cc526d8f97 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -69,6 +69,8 @@ public class Version { public static final Version V_2_3_1 = new Version(V_2_3_1_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final int V_2_3_2_ID = 2030299; public static final Version V_2_3_2 = new Version(V_2_3_2_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); + public static final int V_2_3_3_ID = 2030399; + public static final Version V_2_3_3 = new Version(V_2_3_3_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final int V_5_0_0_alpha1_ID = 5000001; public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); public static final int V_5_0_0_alpha2_ID = 5000002; @@ -94,6 +96,8 @@ public class Version { return V_5_0_0_alpha2; case V_5_0_0_alpha1_ID: return V_5_0_0_alpha1; + case V_2_3_3_ID: + return V_2_3_3; case V_2_3_2_ID: return V_2_3_2; case V_2_3_1_ID: diff --git a/core/src/test/resources/indices/bwc/index-2.3.3.zip b/core/src/test/resources/indices/bwc/index-2.3.3.zip new file mode 100644 index 0000000000000000000000000000000000000000..aced41714fdb5ac31cbe04e9051ccf5571132f68 GIT binary patch literal 77612 zcmbq)bC4&|mgcW)+qP}nc9(6tx@_C*vTfUTRhQ9ao7->RPVB^sn4Q_#j5rzjN9OnC zJ@kXwV~zdii#8yEl=U}ETE$e^YQ1po;!;nhh0YpxzJ03gsu zAOHXq_3w*{|LNyw|Ngm=yRp8dor$T3J~KTFJs(aQ-tmJ9`sT=l>=s=)VjR z@Ba?U_)pld|LR|1{{@!xzk=0wb}@7@{U`cAocf9Y7MQ}X>8v@`5 zMn$rxFA9g(BCca9S!zW^!q#f7rCtcMu21KPmd0-=DU5moJp~hbz66My`ml`jm^B{fL%b{E-+=4zyF4?g=V`OZ8w@ zv$H=*%5T@R;|KJARPwJ8NcLY%=3nJl8vlEM|Cf>YukcL&UHE?!DBXV$=>Ibn%m0Mx zzpJo+9T)fijO;(^?*B5tf9mcZoaMdo^~Qfy==0Y^|Gzl@P4RS$|1QRVu>ZsUC7m|* z=90{4|47tKobZQEuV_a_O`cGWie$OJPe!dmjdl?P1{2F@j}S{C3`qR7pjxMCrYCJB 
z=3{AT1}&*MzD6i8L?WQXu{JnH@wrBH{#)q$Gnb|OSGnvTlt)b}x@N8bKzR`WAo%}7sc&RYZ)Wn(y$9YO z5M-Me6QNF+kMfK!|bJkNd` z$->Gu#VpTl_@FuN&!PITJl2EwB>7z5vGFj<$N~J0;TUsV_G*#u>&6}5vyMhM(=VSA z>U%Y@=2ZCHkK@$yRXsKjbl<5Tkpvw2t#+1fcZ(eQ&!d&0GhPmpIE^(MuT`Z4oxUq~ zIdPvZldjfjS3a~gX1~U>9*qAC=K0!O;mf%5pD&Kq=)0>0Z`STOzs^wjb(kJhrHH`r zZ{3Y_RDxr4wVNJH9R!dI9H+>20desnQkAqbmHo^W4#ft)@^3Lj6Z>YXyD-sh1m7jL7`tDr1w za7PvJk|?KDvl`xOtsakc6mYY(*_i{ZYNts1^CbuHqBj zN3;IIs#mgfp9uEqp;j)oW0;btrNHbJtxdBt!m}_{5NCwWcOT|OBDd2|+0kT{o?GN}j@PyNZ6SRGg+64g_TZe0RFpd+ zZkQT$r@6>ZQka0QVW?(NQZrCEpwJ|)tv7ad8#1M!H{U(F-bTwK1lIsPlHv2~_WC>} zQXNh~5l8DKb^pjiXtLs>Ty=2{vH@NW$-P3laZ0?hE43%0wN3iAX4PZyrbanhXPOYl z%qV1A^@TjDWEYs6(@XwHp%nWQJC0bGsVE8pwiDfM=%Hvz$Lh9~?pkxXZ3CKFMx`q_ z!gY!Xf$e!rn*D&f5~+oq`ol`*M>VB*zR^z`Q$qOK`60Oy>o4&w5`^_Rr(vB6O+OBr z0Bt(@A>!C(v|A39z3!6dRHLsnltZ;7PX$JS@6OX{M@z%nc9l^xmZ&8p0u znB3HpNKPTOp$N_F3kMbCvteY~ce@T-`gt6smidZO{pBId&2ljeCP%apWyV`h=L)c` z8{;rfW7RLtdwQee5Z>@qZG-jtlXoaqC_V@u1n8Hgj_`x6%5{u6o|OfWQZR^3+%mA+ z6z*#NM!UJLN;GG1Z4b>+S8&a9QrK$ND)HRxBJe#a>c&)>lr7f-yyA?~l41k6AQy#V zi8(wp2p?ClZBAi=b_v{jP!s{TY%4cj^+SXl%uU!W26;sqEEWMbY|jjq=ph9j)^TAC zs+eERrNs2B_2bTFIE#<@nkxOU>x76w;-aS{=-PcRVVTb*Mm{QVk>2yJFNyT2o;CGq zFm=30jOlk(v4q&w8)kV_P}}z&i(09+xdxNgL>;I~*RE6c&YpNC zU@AipnYwaTDTKU%xo5794xjSk=3b>6mLDA&K9Sz(Vb49ABib%ZHl%uLL&mau^m69r zzTM(Ot*FOx%Rvtw%W78~;dg2foyH|(xvTZctHyKQMvesL`#9Y-Oe-ZaVlMg5L|a!= zb-Xxew>!|=MkK*~g;mUIwMNFEdnfY4?-^8Y)OP0FYUUbL`&?Q;dVh_2|bfix0zwA!c)BIoES_6#&3lI~sv`zU~cJ2ay zl~L^*pbXU2cxehPLPd`6=7e+%l(wxgH1A`Sto7%w-Xx7`9pM#Y;~Rkwd*V)VAY}LC znPrt?34KiJ*_;z-{X(VhR8c$F+wW|ul-?7G_u^@s@sJQ19(i-bKxsssN-?8JALZ*O zIJ(VQ0~Yc{YLw%>)WWcD>F!6E>xzMX)a~&{+VWrS(sqQa)AhvGJCGBrhi9U>U$V-E zUN6E=Km?6byT?aaKn&ako~cSMbVfZ+lKQpHUr(WZqz;!&CEKdQ%|Kx5PR_H4aS7{;%a#J$Tr(ZYX^%q!?X6d zML~{m+aBrXes-qx` z4bbNsW9#p?6y%m^Z7z?!Iy(C*eJ+=R#YQALh9-i%I=j=kz*aBz8RE89vw_f~pwcxR z$M?DQ7bf#lLhTvd@9SN?gm3#c4(cxj2L^_EJ@o!=M(XuBT2s`zAMw}aH@8Ynr414= z;noU*eBsB4L##GPXHmugoGCNK8TE@V)Nq(Fo7Un9!Av?89|QIXw1B$>A9hak?|~y} zzvS*(1B3^2D9Cji|8xhiqDviF?@XwR7;LsFibk&%s7M3m#!nBfmlrN}>R)OBN%&~psYJ!@-!z{r^4!5@OGRkwxmQaFp>+idB#ITMDq^13~@^z zPO19=-7^3HZV8jZV$h*3;rfSqb%IzLK%P$JEy)P^K9#E33BsThGib2y(}}Bl=!Mfa z7Hs`8lUZ?>gMOzf)pmZfsjEoN_PoXu-EKP6`tieUn!$`F1!%boE^Z71=kbKkC1&pm zimD5S^?6wK)=lx^^A2kRv))`nQ6@ne$U?_k-zGUYS4pMVtp+Z z|4J2ehL--3(1eh5p1JQ6@BNH=y)Njgcc#{Z#Bz<;`}vZ;r=RLDMTbmlL`kF+cmLg> zv#Nd4K1CXnBZ5f+?kyj>`0qaF@W4@>psG7wa32VuHDgvLhi{{DakVMWyD8PQciv5(obC;x+TNrMip=i=>%g^zn|d??=s#xcJa; zUF}iL*Lm%upEm=IvL7id;bDZ@_7i+L3_*!A=9s@qxK`Z#9S63exxzGmC4i_J)^bIMl^{$!-2r^sm11>Q{ItD-q1yu=|q_JNmx9j9>It!;zGGJX=9{e)jZG(57Dk#+FRc06{pMqE2)=VC0>6aT`jP`TWLHGM?mmr?@Iz;rtZiGRL0{B$T}d zJS&$uLa)|=lxd9ooyxB%>eD+a;DzciK6nDba*qu_eVuC-;?>S_uq5u`dG##Q0-F}- zbj)Y#4Uu^W3p1>>V4^;$yF<=X3h%6tui76>JK{PO;WQn(c4zxqBaG}!o%=U6HW$;i_iLxAp;xL27XWdzF`CB+BnP47mD3%#|Of=|&F7NB8;ZG7vFjaq9 zU(_0hGk93AXw^(H7i<482z;7^7bE41=h@WSE}l9Yh`N@S#->&40`!bQQ&3ta)bE5S zJ6hki{?1ER0@F#L;(HV%xkY>y39XU|)s>yIWIw~Sb&F`K%5ci)izx$0yBHYEa3l3`qw zg6pV9cqT7sbJ*(R;1PR-l$7^c$Pr+>i5)v7r0TOzrH}cq-hAt)3&R48KmgZEr7x!# zu(BnOuq38Bv+`kZpRPvDR-Rexm}1phEiXSA>{_<4bJ0cn*o}JkrewVVm>(uuIF~~b zt~c%2p^7p1ZdpoAXxDjtU5rn9D%9``oliaXP~tDhK0+OnhMAgtyZxJ@~M0& zL5$U@Ae?yyd$+HR@Ga|<&i4M`(X%?`&T$$o8q?-JzrXna=k|NHB5-|P7DEQ8ttB2t z2?d%0M~B!a8d;7Wn0DTYV5cIQD-S%3FHt<)!_%ZnI{`Ma&w;M`Po4Y3VX(6lc(o-N zDDRm{PjIs97(kHED>Olfet!NJnp#R-<9hL1rQ9nMkzF+urIjmMsT65zp4%_W@TgNUI@TdSgRC zuB*I0nwR++yE+{m|^{pQo zjo678>7NR$)Tvjlz*%FsHjulG^BcYfKKJE>^O|vuH_bUwJP~eew;qAehYHp6yGLyQ^P`{Dvmte-zvSY@1%PNsdpS2%P^6Ae_CUH#OR0`*hqfDUu8R#E%8>1k` zzc44s>b`rJfs!hF3jqsaQoQ;p@$e^?)72;|DCC>mhFeZfhuuJu?k!tiKx^Q%?klAn 
zy#c*B9}lTG{zpBQ0_^Uecl}@r_CQ|NK>~t8!oe~DM=kqSH#QQ)osIlrt2B%xUv6ZC+{wWmn{fQ9`h`ITk~>mqQ;^h;Z_s%6WaJDRC#11cue z3rOX*KJo$xFiwc8RjOd{(+lp!k|c{k+Z`9Vn=%oxyE2Yt&kwBd1vF_tNFhieX8wNL zX;KPLeB%Uj>aJb@cm>AwK7pXDR5}=DO#%uyNZ)xJG33Y}OjXwgn%(^x*&g8CII*)2 z%0!&Gjp14c0vEmb!8xq4h7f?wERY&<8p@77G`*jK<4D@xeOuI2nFmo2<#x)gwp-}N zRKaglgbI*Ba<;syN{&c$MSH6N{io#sbOL1@v9(gq%}U>%IC_qAI_zXRmz7Cyk9bm` zLz%*S3~#7A)*kWNCL$+3vhzQd29@t>H_tDmIc4C(1o_~(M!rl!Aq}-+JHSw_5p3Ai zxsy98Lu{m1=r|v~Xx?q={Ql?&yQbno6}0U6sW%@D&0?bulpnAuq(J0dLf4X*Jd!=a z2O}_Ph_@h~KvOqpTje1;O{B&6TNSVC)ufJcQ$HfYRtty4Zu?y057EA_l3qy(1HvuZ$*jIF9~wOAc=D;mX+K290x0C7Ftyt z)d#eY_g1%zMONpuvH()iw*>hnT4sm?^^Xdo9upD$@|OCn0C98VDP9#ax;oYJRGx~< z8PF5tJ?z~T%EGIA4Y)59y>+2#vOgI@ZiH!P6AF1mzmsrBMvv5vU> zRAzBw%yJZ!57zDDKZPfc3CP}xRJS00X{%8mkLeJ76V2 zuPS)UK?%{i$V9Pz@&a47-i*(9@~mlUZq}m=ba@R(dC^DwnYOrKO6_eCKfeo z`NIf<@;b4YIm@KQR$dXrGbis?q&w=W>O(#o+8YZW8jc?Z>q$NNsG)W0Hr-QI$Fc_# z-x0scFPn4m$xf+CQ_a@iN9Or+qfiA7n$q%PdN;%%(9V>oAnz@0E)5uM*MjhNY|hP9 zMqTviOikjb@wGaHiaL5LstZaf7y`x56Rs>xF6K5)kO{a=#S4>;m9tnv?E*%7eR;__ zCWczr5hB_OFRle-ze|^D1W&}lcqHcv#_*L2L5xC!MZPmCBP6p&rP*pR0(a2y(%qx^ z)PZjs)yxV{LfAEsajZ`i7jJd|P>aVKROZKs+DkolL23lwG$$Xxb`0J?ZS-(Z(4Vv_ z>>cjFE{sFCvY3Pw7;MUW3HA=}_M!{LY&XY0WoORfJ1GLo4uLAycY-XmN6(#ht&#yj zW9WO=GA=W=nWfNiIq8~7>D1pA>Yc5){JjLA9wEEQTZA#&-KT_YXwB`gX%i@e6b1HLjNxLZr%_PkjA&J|V$uzD>zI2Q53r5z@*+@?3AK-SYV7dOVo%?{#znS8tN@tl;3+2X z4Dzmdq@=IX zdUi6(RD(iM;f}O2!r-RjWZCQv&a(@-Fifm)TrR|{PBYO&@Z>xp+1 zB{Xa){?sb5LeGWn5JOs^acyurH_lxOBvSJDv!VA4hR{<-f0qExKF5J!BzJ?STY%KI zzs&kt-D50Y)_%&ZPj$>1X|DF>h{fg{THiV_ppGvMUe^L7Y#wb6_cO-FDyZSeX~4oF zf;`SPyWQze<_RkU_0glTgAmm^poR(OCVEaV!MDB!SQ-MIWcwQaQDCW#o7H>LXuwZpL0X}h~ zi)jXvp&~|*A99%H4bOOnz2z-Rwz8v<1}GAkp=g|#*Jkt9%*k%@zdW2H0km2Hjm0-X zmhW=h*6hD{R6P`pB9!|!#P@?&i_@XR=#irWiJ(th8>z>%p5@QaB|6J}(Y~%$wztQS z%i=LSBwU?+=PxZ5vO3bQfBwWK#J=x=7-vh!-feb4wjSbgd(tKhyrbc^b#sOoL%-8N ztHt(hP=h#Yie1_$I?+0&xL#CNpWM-$CVeHp#M<5Rz)FU+18qn;VGRFcCC`Rsi+tA+j4eF98@Plrw`QZ2}WE9inPgso+Bd!b&TwqM$hZO4D zf)H&~<%Ji8Sj50M`YW$4{(N2QFf;IjAIB>fA9U`TVsf0njSS^nHgGX`~kkPE_upa*$ZytQOs`luqY;CAcVsd_>q+@q_^rt96X!A0@74D=T z!}DR>x<)FOWpAf(HdXoUK^Ek;*dsA$)-ZB}@dQe^d$56e%_dzq!QUab7%Z>^H`#rn zJ~{ZJqEY>Y-@|yw*+@%luk=xvvS_&l(!zLdNKYNs%7}jqEjC09zJVjhPGfG|ei0Dv z-q|r5MVi+a)+`r;i8|$@S`QRwEN{KEP5J?SxfgM4~QgGU)J<O4H4y%&LF~7wSx7O`4|h_S)netEEGJRF6Y1AB2_wQ(~rzM_<~{E#M)~x zc-%2oUP}KwDF$A@U9GJssd7qOiq#^!)lxGLf{7E!xvVzyjhWjry(?c6+Cs-Nr9tMvnt@UFCW=XL97^U&QXfKf zA*&RWQ5na%MRnkw92=>WkG9%SD!pW1y&VY%I~s)%B?ER1laj3o82Rpd%Yc4scWFlZ zm<74k;LyTM-A~Pshr>f6q{DnA7R}%mw&u=+uq1%8;n2L@Oo_prIuc}+l~Kx-w#G`J z3z}3gg3B;>KUuws@8AZB+*)+`mJulcLi?4`An1V+9B$zXOJu>ePQuIiau$g{n_uRHQl zV%fS39n|X;U)|YH*6WL_mR6&R!$zt?Vz=YG0~e(>qhU`LxCm`<%W8dbURFooV4;xi z>e4ml+!gVME;q=~sK@;3?S5|=^|oh;VyFn`&vkSeVDebtE^d{!VlN~hio8B ztu-6Ojj7JX{T;Wy?G?GfA`n5Jwc?wjL88bU!mM3si9~%Pq_BR;KD~*$`SGn74)3WV z8sOXyb~%HWl|Mgzgb!yRQTia@oU2X8uJrNhqVgPb|Iy9Ys>L#bol`2&2z`+UEWx(o&Eg!3j**>0l56 zlHzsg5o-LCv=*y;$Q;iG@(vjE)tJ25|Hg58t25{UD=*?_7!9sfNcP}hvl+BA9`GX2 z?8OPUn&kAjWjb%%^cz3=CH^G4-6iBg=}&cq`B@3U-Zai2%k)o2(xJwe+6rR}G?#{X zGyj|YI>XrZ`H@FZz$_Od61@}dg#%(=yiuzlk^;A#!UG@pOR9MF>l=F*4gd#C5@@Xk z0-Ho*{h~N1txL>~_0%_uF30Nc$`3n9ZpKT6dwJ%q%KRX^IA~$G<%iz1Hur-Z)MZgA z+|OLuo~~l#o&try;70xrj+<(mPy4XV@b43Qm~Si*Ig)%NW?BFXC3EKPcM>uC&k$zl zn^LI&<<7qaNGW|04iDw}pMfq={hYM%Y-5mDmmBhNS;-BS*VKg#8IgH%Mra>n3}(q6 z?C~J9uScE1=2Mjg$y73QndJ}Wr}PbY`I$p1_9}$iczGbW?2zBf2lhfVLN(j9E0j?T z??K%0p@(+gxeuinjFyl#bKl}m5_7$vL}0IU6^B7QKdeWNo+^a#-(L75d)03#1@R$*B$U{6wMXl(L7-7`|?iGT0nI%7>k3 zSCDL%e#tk0p5@r0MNyT&Tg8nsu!8GypSy7MXAv%v+xVWI)piI^Qlv2&dW2AE;)FqyY(}$QM2XKhXA{1|q)zhd#Zfxz9SN41SYZ$cobvGZ 
z7n|%9?6L0$6;$8&T!46S!OC5uKi(`bW*>2+eZE{h&yu|O8gw6C6v*I%M^qq6{KlO6 zP*k%62KlDs;8IYTAN1-XoR}sz$|4-)W2fV!J|MBk(KiR)m#oUSS9{ie@nyA*Lt}sy z>p-a1*GHh7`rI9{j`_m57^&!*rbTSPTwN@opgkP+^5L3yY<%)o7lP9yR4JqKb`|!t zqFO?c@O3>oR4E7+E)j*cT?}~cC=zV^iQbmTTSs=DVs!#A@<-&yVBjcVK3D=E0KI$P|g!}!_Rj6?d$5At9=!eChtdyK3?FU3Lu>6kE zXCtawR!E9K)5W0tTZD7P7c$aCT20PVZG%m7qWB(@PI3eDSHgBvcfiI=!X+PwjY!k^ z+^YuEGQyxy_c|Taqfri_0E*tFW$BiQzyQGx2Xz{=wP&^5#!TWD&3(_d*^2P05rx{G z7{V(DS;R4HxQ47zEEl05p-AIUBJSd;`axln8G7O**C!2ys$;5Ec6p#EmqyNyJ#wxl z>-kGZS|mCf;hK1$qJN36Izk6vRCdf+abC=aO%wX!+qA)qio#t^!?KxF=f1XBw|s*S0r93O%{3e1AO}0&jjG!57VQ6xVzL-TOR*_PvSn?8yiFjgmT<=x5MQ8@t9DnKS}=YNn+AEo&O58Y&`D z>`grl86HKrO&h^Txsxw-YNuGyNcx67W`O&cW}q|R(kb~3eQGEzq)}tE?>&X9@ahy( z^$2UzW{Qd!QvV5i&LBOJUTBU&I4RjsnjDDUtpIh`Xgah*lme|-9{nrageyaF#il}2 zvj&!Q-A3#!GS}SqmiO&y@ywr!Sqv7(m&$OVfPWV!QbiB$(kF%3&8&gGP^)OO^67<= zj6lEuXWk@I(+{~7WHbiTTUk=8gT`9CdL0^6yhW;h6oXFs{ue2p1KO~0p6PM1?6HO- zEE3~FHP9#20f>m{1I|*SE*1MD?Y3#=-lz<$ zeWl(cm0rqr#WIoNc$m3wJ_J$wIrK?DHs_13tX+dN1>pQoykGJOC?7GC;#;DjRFlm- z4)E4eqZ)T_bW@yy81<_=QP&rw?PIJ_7Vg4@o0H!j(y8#^vCJk_%^r_t@FZ-wwls#0 z5PdRK1uWxEDfO|a+5+!{;TG6n`|s3MASp{GC9qP)pPRZrQ}r*T?d_EqZ*7=A)N41cwj zBmrahm1RYf-jF<|?N|P|K;cEX_7~#u-$oZz5sX$X`Rd;gTd?Ux?HQbS_PXlL`DKY5 z=Ex+EBGz{Ka)Tzqi$wix?ktK&qRo0%9cwU=*o`z>h2A1*A8jJHCBR8+vZc^w*2hGi zyvR;u3>w*1mPTKC^@bG7y62d8!kHOi;xHKxaChb486A7E>BW<_v}_d3m%qgC41H$4V9_kB6{oKcXKyfzO|p;Zumi2KE+Cv8zYk?5xg_Ps%`b4&a3= z@A2v(EeLqRhDteSLb_Yl@?C0gw7rmzvgu8PIwC^5@?s&5`c(V^PzB=mwU?dztia0Y zog{Y!Jn_i=MMNDD(&^9@ahk?-)mh>sENnUO+<0sv?r`SpMTr^A9o2@}djaebd7=bc zeItP1MeU@vpyG_mFbg~Y?ba=|*-oCKpatOgBv8ydRJEI22#A|=MR_$xqZ2}wXS`UuY-^1;MtUN-v%g1Un8|{^r=s$g zWk=Ek0oyI_y`g{U9W2n*d%zwA!TB8h2)rnX!Uh#R8OIoDYlxm=V$c;&Y#u*N;ahy$ zbeGtSKzf(2OR~6sqS^*4Q#H7JKy)S9E~?ZkfqcVQi@45R2RzIwLl~^R_j2Y?u*p#- zcsW$rk;)i~41G^=BG#YuRrPtgOJaM>ub1|z%Cqy|Pi57*ny&guRh?+5=C4lfd_S}b zx?Mu0XEx#$j~#z9x7DG7d;_BBE${2mC@&Hi9yXG_;NQoNnl9+=cUDE@xA8*g+(&~d z4u1nDIWeJw3j;#yh_~yQfSfIXX`O(QD?5f}L#Y(`V)-j-Gbp^>mChPrWp>~Xh1vC1 zDHjj{vd8d7j0(S)?7DY{zn8^ zELpljWn=)|Ta#=@xZ2@E2m|daNrJ?QR&6 z=bZGE^OiP{ZPL%xpwtdI67f(i*DQ3C+D%QP2^4H*oBJfIcmm$4v|ioYP-}E0su^jm zw)l!=6Ijk{%EuGWkye7s=Fc>)VHo(`sfG?}gi7>PGO}>S?-dq&u>sI#k3q8)XW8SG zX!yHYgh30pRw|_NdVIEF&jVEkCeJIxXdKu~Piarf#nFLX>>`u!RoHl&oGNdzA1ovF z1~a7+izXru=CQ^dpF+OdoaEb8OcSxp@8UvY@s|)-`=d0d&=k`Zpj1$KF)f-qi-s45 zQE?-T%x0f2K8xOxmsD40V8N4%Ee5S)fmD;5CI8T=~s7X1XCb_<5YxN4sQ~2GpKsW|M*8 z?bhFT!7|$4(0hsBpnDS!1c>ws(t!>!O@w4`af()K#JHkfv~2#?NJ3~x<0;%vT&z24Z-H)EHd80UB^n71t4SFl{89yy5EuCNyTD02WHC z)3nJVS2o3xWI$7;sMcw1v$)h88?btc)I)qW#92(kN4XvYf%5x6-dW0eq-zPcW|RNZ z*=!R-x)*YqS~B*2B2Ie-)*BCwA}H96i3`uD`bsB>#zF|kXy z=ltP{!Xd8~GCqQA1+Fl0#L!f)@2aC8ySSlxbbtq@Ii;S9NU98?s z;+p)!`J}GBe~W3-G+b)7qI5Io!^(KgX*Vv2nH2^gvcY-4)G{{&-+SCJ=GG9iCZ%2d@N zuagPrU5cWybuW{4)O*3Ev!dItU$(u)0%W9UjDM07ikav+PSs9FTabn{d z8CCpA2BzNVEEdlS_VVe4KNJjACx{9J`WvUuYP8nyck*|8=e!|ogKcnCL8QJVU)x(X z86F3se+^gQ?%58|FL76DAENsi<?jGXF!j3ZgA^CHhAX z42M^oOk*4KW~#?SH#wgj2TqqiVtkPpSs$}oDW2+ZE8;;;SmQ(Rch;S_=R!z_N$C%b zV6Q=f6-oY{5x{N`W#3p7x|&z{SMw9=bE)+9FnuSBiMO$Hfq!>U>!95>WI{C-UsPgW zsM2{Mk|@sWV70Oc-EJu}Hp)X7u!JU05|zbp=hqG4yo8?6RhJ?o2NW>SpnGaLf^6mn z<<}t;5_cmTY6!WSp&uYwfKCil3ksuu3Ht5*-Oo#Z9y}wiBQIy1EDuT8ZmmT zze&h=N1--JndZ64!Z!D;;|oo5em*A2E(sAPwKC3SAfDZF@`P8OJ8aOYnO$w96-DKr z#qlB1(@iGW7G$sfmIH}6VwH%6h{f}^)p??)k(w!aB%%2z^e8Lno$;cJSr$3R2}$xF zj9=FwQxvlH!-NK{V5grIw(xF)Qxp@6>5JDIHr6Or8%xI9iqmna6Tm?t-1CQ{cvCvl zuX-JF#O$20Q%Hn1KJe;hLa%Y5sV%YT%>DK&iF;{Uou-l#Z zbhVGyis}ez+O7;I*zXpwK|rct{@E!^EmGzGDseoGeKAG@n*r=eb$~NP*bW`_)tL~P z7rT%@`QlG4gQVwA^_DELN6*5JBX#bpr0^XTrT^@fUbl$cP@a2_dTtomlX>vgS3;WV 
z*PS!aOB7YNpC=gas!FsCt_qf=o(dwY*=elFmpS#KFshnszB0_HUY2@6ODpCCX!D%8 z$^S^DNmBYW5eo?pd-aytPWc)`q@k$^iJe?5GxK5}`K@?+c1&VbIJFv!F0hh;S81mFaLy zm31rxUR3elF=Lp8T$;8y=xLU5@0qjg{FB@B8q-W;PAfXu+-DE4!%;_Q-piBC%5uJ( z8?bG6vFGP6(50w%Jnp4YnNIS9s!u)gqg09c&LGlg5q7FhV4(YDVWr#@#S02NKW+g<5$tif_= zWU0{K^vja~vS9YD&`0Ogyao8C8jMLkzD{6rqmM>!3|mR^iX$?(gqd_9Pi4kcv>2(; z^exL;UYthQ_|`PqO0$eS;Y>to7rr!@Y$CEFnN1t@p-5C?ZExGej2}T6{OUdB7=xJ) zE>tvVf)V`J%HMX$RCw1PAbITCVBu69xri+W0o8!i4W*D1;*(xIVoN_L5{yoksy|B@obBbV@qMb3JZ`K zp@g+wkrH;|(!26??>hD*6(GeZNNV2wBLL)V(l4-5{YfaN##={mS-@$aPC+j=PN+YN zI0isB8N!y0;mS~Z^MB3d5H{3;m{x(s=*eLKE2D#k+|CFMxMo$rMDCML5O)gwiIXZz z#=Ak#{(y{sl7C=c-bX>4)apZcrRX5&mfy?Q{xasXxD*|;4;D|!Yw?66pq)sqPWqzm4a>DYXRVjj$P0Z=+#UG*&6b6Q*YV&_p3t19vlo=au z>4P}0Y}@GG$`@5Zt|BuuLndg zei!5)xnqgO8t3eTdk`GwRJ9Y>2>F$NrQSq1&(?$7+o9#ytHAb7TZ7>3bA&pQbsvpJ~b0eU+czsY(ydn zWm!|@UyP0u%U1FC00vVkAy&=083X|>75Sg73aAP(DfaUi+GH|l-=3gIq{P=eMMr{d zD*^c#C{L|8J&;L$6R+pWWNk{RTgwUr8-(1O#$oV~=D_U4T8;RucXn28&<+=Z-D=?y z?v(+as;8LYceGsYLw(CkA(vk2PhcUR=p$^xq0K}Iu-Eh3hV@szNX!H3->YK@hKkvb zJ`%TjtbxKH1HG7x)JMNz@ErBxjOE#}wOS}pIRK;69!E+2VD5@dZ*+i;oywG3E4mKs z3$oc(t&)2lFDZAk+1(bC40^pLG~X<-nOc^de4h=);03UrwwoiSV(Zkx(x+jsc#V=bIqC(pHE+4pEU zzVsuqJ@mEvC%9W}BbwZ9tqK1$2sr|UYF)Hx>8?~THsWd8=+;p`zSG!96Tqqo$D<7s zS->dO*{AThF)2l^n9!i+z8K}^(m2ob=-x~S=-I1i4MPg<GtVx$vpO(i;Z`-g~<)^J(4OIVQ!f? zl%ovM-vhMU&xA(LTtgAJa2?kT1cD!r@zpDkWrZp!XUiwFI{waoTSCAbW$ zsgaL=>j!oG(4^kab!J+;QJ0SO@#7NP#Z`@%h4b|Uv+>OZUPe-J*4vS3IjqP&%z^i( zg+%S!LR!i|3ge z81T%MLF0b9aQUl zB?j_%8#zq0Wagj8#7VPHq^VtM)=2=?HkHB!+HhIL^k)jmj6YQP9yD0E$!Cn}8?TdS z=e-i3o(R_gU@GgW6DikMNfH6KoncDbp?8Ue{IIVZlW1|jbKzFa{3@sH2P?YLXE*2K zmCujuH2sgY>LRK~KK8)l^D6Q^V)O41$B2q1UvQE1^}2FzI3SGpfBH8RB6bAM7Wm#d9b% zpOXtAWC_hXP4sb|NawXBD@mfl!96R;I2P(>~RDeeP7kO6aIi$;V~S8XdmFc z<(Eq$!v(L^etJUDSjG;_C_PR`a>gF+Ov+NTSW_PoX@FtsdUVXG3oqm;G%{swPYq-a zSt)iw{6s4p_6|Llh%5Idr^>2dxuZ94G56M}n+e9+K(CTLPJYKDSc!Uc9-7U+>`yqY=-$-t2C_Y!qfOPE=aI}ic0mGZ1~MlJNvupxd&aKXT?e_eJqhj z8}OBYuWu%ovdgQ`$`=~^Z0QM(AQ_*I`a?>CmgP5&`pzY5qn?gddGcGq&xUouBvFmw zcGn~O_<`)%(kQ&a6Y>HfpjUi@b;<=Zdga7%c{Z+j_y}>3-GA4wGxE7bibV{|RI{lH ze4Ha~`$kP2g^gSA$dPtUdbnB7*ocaCPKO+YLopSbT??PwAl13;CFI~qY;<#g!XQG- z%AplOg;*V+a#V@>g*}LzP`n^}fPgxzldqiS`=-M%R`yc>_2`z}Ad?MK{8m02oHF*_ zUmfddS{0D+0j!o?#C41y+aw7C!+Z9&hvap{tR5x6JZD7!?R;q*uG^8#&poia+R~)+ z67>6g{*o(^5Fb>POQ?}fVK>&U!iX~_Xm{n#dD#r}dMU=6pZNuMSZ6Bwk~hBNM!J$p z#D%9U=TPzD-~|KtAY&}(1L*2TF0$=^vG!I`bq2wjHtz23?(XjH?hb(s1lYK{OK^90 zhv4om8xQX85=j1>IkVQRnf1@i#hi<;uj{R@uP?gas)w-Twp1f>bjBCoMg^B!09n4{1!St>nUpvVYSnMo$< z^yrdjve>2}YCRhIvBfFctY88<)Y|H(Pmb8WIr4OQm#rZN3*U|kV1 z_`ZQ6WXH`IvyQBZ^gg`H5ekB1y6a~7UuAg-C)ZUPHLuR0q~*JDRwu*KK!W0#A4;#GFaF_8ZKfv$*XwS{f3qXx}g*! z=gLmhdW*&YlNr92!E@vy?;XuB8c%esz4K`{L|OG-+;=|*Meb=*M6(WSY;-6%xU?$A zXL`y=1K}!g>VAPr^ZhokJ(1krUWDL0uN-Q64%}NFFhb)$yP|ar$v07eSbuQGa}avL z;jx5c`l3Jg=rY@BE#BoKqHX=*D$dIxBfzXFz?^w@jzlr_L@wQ6OX2X4<}2>;^!5*? zOR!xGSvg1VrW_j80#%8rxypZo;fE(34+fH*ykX`!aKYRLke`S5Jpoyq$}owLFep&? zs0wL*(W^oDw>vPa9k-G3_07cqZlj@RP2rV#XX#|k7u05n_3b)_s1H-{L|K~OZuJ9m2GcLl07cHQn8q89jcn%K!qGf8gP|)8;SLpGd*X zGa-Xryi1KUPr~3}D^D6lBQw)7(U`>Skl>K*oD!ByN8GTkq(rHAdO{TFE5#toMZq?R ztP^WTBX~5^7|}@XkX~c6rb^ZFV?uVb`NeQvIfJs0%%I*9=JE$H9z~`GVrW~yAqtN! z9!;z;HRuKefL6r4+>&~_5F|p{;JbOS(HamM;A0VL-DL1y#8dPkbrqJuh#|g&ap6PV^u)<0 z8oRQ#3PjMVgt1YkcEOzxU5297*w!?*RB)8R1O*|VMMhqVyM#{)32|@PgCQlg>?4$W zb)5OdYDBO0q3JxQ0T2Ns54Mqs4k#>C|UZ>rM4M%Dl@Zvdqw;xEf>n5=%E~jxg?t{fGBQA)nI5KGD=tOf+ zbQpFap;&DR9T6J&b^%`F4xk+aFhiVJUzEaYMjG$#oG|UwNcsbx!?{~kM97ClQVQOy0E?(w~VACGm>bMRW! 
z+NCuhTGBn|bshMWxX7cAla)r3F2;R9e!}`8yza{yI`_yZ5j9o?MQC1MOKWZrtFzeT zyrFfnQybgBm*9>`T-hG}X)@sdrkgco|BV)1HR-1S(zGfG;-#yA^)%wM@A7{h-iPpHoOgj0)_eS_|w*ADmSTE7Dz0M5OO@EEY`^0&tU%>zSP zj3*^Pvkf#QmotKXrMf9JrbIc%uydF;6DiIGc0vqh_|Md%H{U5|Hf9`T+>Ga6Y1+5**I zD58saN0R5qH7&Ps?m~r+bM_||^1$YNfJ5wYo|4ff^v~z=Z?B8uJh};LBhL{Q{_$$D z+#Xt-QyGw(;k!@+VXi(})Ga2p*ER>R_AZ)3S6h)V_yp}0)2 z(Y%1kA-zT*8LbcYU~+%&v8*LPaVRh@QxQ%|ef{<$cZve09$v$HX&+S;~-SW6G&c1J0gjF_t1QV#c^X$_|SSsg6G6p9Gd@$JNJPxKO=k**ynLxb|ONO3X3 zvZoL)lASc>oF9;Bndi(MWi1c}2el!IK5$_QgKdS*ei+X}Ty_#{S?hNS{f7S9pL16$aJMlC8K!Pqi zk$-PEvEKWk{967Vtswr5-Y-rja$l#Kg=-E`Ej)nf+U5p;Z^)5JjKZW?v~SUfl>^f$ zQZA^LwrZ8+;4aYjAPO&8!MMmoCr&6vxY58H#0D9YJTa&I%EEL@=v+|$=E6gdI+Cx! zQamkYsc1@<68OKjn>neF1j~*7*^#1+Q1%VKswdkn)Mz?1BT|glfPlsV82)Ef-zQT7nA{&QhT-pVEbh1{oV13tLO1 z4vOx~#r zxL^4x<~yLLp4OnGstezaT^6z0s8Kq4X8xG&n=Qd;GKBhla%rb#`4m`c!> zs%T8teh=S~e1t?kQy>}~{pntW5jk9#!VN??*!>AuVZm*R(WLEWp4OVYj#4x~&Fixg3xb2tr!xFTm^7#0T zhKiTC#E$~sNbKLZwhSsiN+uHR$zbv>V1-VPJo;-84?lA)&zsO~CF)sS#S;9@2Cb4$YKB*>QuY#h?=EI*e%IrG&XHqt9WxY2u zn2t!W>yQQZoqCbLpuDt{4qvJvA{cDBaIw3pFjjf7y#$g^+KwcnmIr_S^)CfvG_n5n(-dp?85RqhcTzl9VO~QFukzXl-u2-y@WdZ*r!)HdhdyEhEIqgpx+xoW zKri?t_zip%#t0X9cTT&u!PX|=l>U=(>0E{63F<@N5BJQ;1^M^buUV|!LAmiK@CM>? zNfZ{F1Ec5*EFmR=6-zTPVQczd4Bz*a#jF`?*tD~;#eIlE;yh!>NMZP%6&c3D@qU5f z2x+0Z){=OygU4R0VK#&y_C;OadmVo+ zG>0I`#k4wkvy3jjv-*VAN?V(;5mtbR;F8GtYZARBXqRVvc8KF18;jRk1aJjI#1D5} z^*@YJ$)EbQixwOo>d~?0b84GG4=FWBw@Rz5FJLi3GAXiG;(`n9#wT(L!fwy)8E0;J zLKrc8o%K=jdT(|CvN_*%3x-Vb#Jw)EX}w_eKK;xJ~?>lE=%YnFv180i)iOt zLtJGA)Or-riNUVPiDms$A~$LriqBlX-WW=hql7ZC16L6A`Sxds9w+48YBdd!*7*R2v&g9aNi-MGJ5{+1kPvJ($- zM`+};4B5SKm`TPTknK$3Q({?nLeUj$3Tq$roBDfJ%Bra)B?;Fll| zU4UwZ@+#In72w86taTOT5jZ@Lvc`zEOd``>XH_8mqr?I?>mCCgt7{ zv5}c2=Dj}5@yJ^7Y#JTIa8jt35Nu__Qzn3$_Lp2ho4FxveZLL~XHULG^MGp&?797% zJy;KO>age3_ifrHqMuiA?X!J<6oVXr3itX2)UvZC8y-tv-3PIErZKD`d>^W#xZV${ zdpNja(r&5hT6j50&Y>zbf^_hTJb4hcP%TzJnZW)%*2&3xd*Z}>%dsjN`>jwAx1eJF zAc)TK29KF5W-3ep#U5ELIcUuG4IX3cNgSSxu-_|d@uiIJTPmRDPg0S7|904xLG>#} z%6kvySz(VnKB0XTB)oNM|HMEj=VIoZVTp5=^?5hKDRLV+4Kf0GRdK!h8%Dnog7GH= z6{&xDaVC;hisNv&6^gRlWJacQc|(Em*jwz3pkBkEpYs^?dpw;=#E(rZx0=Y)YRGR& z;&oyzv&umU)Wj7F8g+`+D30tgD8}Ts$`DW;GnJn@mx_V)u~`vM?HzeqF3=DA8U)W| z4k!0^v-P9OZRnAn_6H3DFL3X_>#`o~;(Z=+H%zUjv2Ho^N^UV&sIKh-#->4RzA;lLDuqewxZBl@m zwZ%K4rrt7%JAC7mhB@9qsnD&G*Mj<&C&7O9gHV$; z3PO^v#yz?n|9osRpp^G0a(26mb->E-!yfj5nxATLt?P}OdS@`*#J9@yz2)YlGvy@y z6TCQKD7fgS#*pTihq&xWfwc=|y1&NH_V;baKyBy)^K>~YO4eMXxXr#KOseRU>+zgL zV8q$an$59I#!VmyLp2o^m1`NtspP@@gDt+z_w=Y0ySD+TKYUKe2(kp4Z1HlPT$eu3 zI;5J~NQjlz_3x|0zA#uIxYb=k!!>|?pX0##vuyGpf#XoegM3o_CdUV&1b%t$4Qu+t zh>JBf?b?$q_9a~827(m-C;<@em5^%^@#Vvu3>D9V*w@SLSV0Eu?%9c#K`)ouFrtOJ zEG%F^ucj(5TzBY25*a}()H^DXYwb6(W2o$TiE04R+(IyK<iR+YW|XXhy7%OB21ozP0bg;WMC>f=)6beU8Rh8_=5v)cuSz%mun}2vCkT4vA+3QDDZQwqFdD zF~A_k>5Gdyc4ivy=_RmI-LTf;&g28f$_0BO^-({tgZf%zVWg_YVJ|<~1^lPxUoj3k zEHgdrM_GdiZ3sZW7Z{I!pH+6i+MnH#bn}W1L0Tyu0s_X{KYwZl&DnJHA8|XhJ!RI> zs&1w8+RCeLjUcaS9Bgm@O+kUJ^;o$oCu>nW zwdII^a8U`G;~RdO$pY`7LQE*mw3wo&^*@(^ZSrO+jEFqQ0tpKkq1(skK$=S<+7Fh0 ztF&vm`4~sBJfIDY2C3b1k^`1#6%>;o9=r#@NgcV!IIVnu(s!kervl$D>{|12Nnm(1 znA3K!qhWM`#Sn|3ksf>t`1UDE?`NDo_(yZ4?G1M>(|6^f?>OtcnJTz%S{k*PY zEP-aLBloXbB1smmOOAoiYdNC`Aj_(EHS<>tvo@dzGQLrV8P7y`KH_Bl|ls9xQQ<{DF4e5JC0MsZU@MWDDtDr3UitI1O5uAx-gd#6B{i@2V z!}EHFbU$b@!(X#XqaUeK^%-hQi|^t!*X4C;(%F&njC=e-~c6()BzT{ zzIP2=l6RSXF(*w1J-LH6mGx9DaZdKjKLU=HQ zw2tx97YF(sB9qo@DlymyPTh2Ak;ck&%=OPmAzv>y%H}g)^&)iSv7Trj<~R+c&;v{R z=Mw{_OeN_bRA`&C+9wx+zo9zvR{X5TE9MQ0Yh@p?Z?8$wseE!9#rKcthq!_4Azln& zha@zqdD7ys)Ty1P7OwVPq_o#5ub_JyoE-@64?$+Q;9dKT6_iGr;34|Bz{S$sLusiW 
zi+@wNpdZOOQ%opO%GsRWXnj)nnKl1CA}-q5uG|V+@p`H zgG?$7>>~+AANaP5;3Efkh9~~9#)8{@Jyi$@KaD}ujQNhS7Osz6-$yWU9Oa{s+3+FN zaw?)3mDdz%$%d_k4R@InZe5hd;W>&dasAAq=7}?~4!WEM^oCh0Wp>C^O=v9j8$wE? z8#EbB*w1@dxCHdZZCdGMI`Tz&J$orV!e0ER-N~2eHvjSr?2&i9)t%}!Ea)4PY8Ig~ z{cX5BA{?v7id{UN=Dw5WO`7rtqO5BvXpIvX-D;t%(zj-nNyXf(*4}n=r2UqR^U_RP zJBS$u!%IsMR za_a|2)Wmh(49}b0%^+PYNSxGE9Iz1YzfmU$Qq)=kHOuMb+ya-#cPFhlqfT6C5NTK` zTb5K&Be2MhyEHq7c=31^kox0bD432n~n^ROyB3@Aa*Dh8%&~J)S+k`j4b?OJR zA@8j>l^BskukYck!W{`7Zx*FGZa@c4U2e{RD_?F7XnN&=t0wWB^}48GK)5VcY*RbL zacbD=u<47dv792RhY=!Gw1o*|;OdnOZq}C2@gbEPak$-oEqtYxUF2T^GtySl-H8xKImT*@dYWyTAy7SQW9<>E>ma+p z0FXK7Zo&Gh8pXKmMw5S9@p-SOCk(J9hJbx^Q4Tq+srKJMkxTmpLbD5O(kO%d#4G)6 zN#_aft#baI+357aYt@a>qBTYn}mSHvM1`Pwh5q=@2$X_igrE*gg*_US;4~*RLyWsN7b#H;u^o zU#EqwyC7-3F#&rH#=Z~FA4Afuem~{|=aQwrUNXjSfqnpe6e12av3@Rk-S{bUYmYUe zYAa}Vh~!3EtIl8CR?8i9*bk#+siHEADut!$0~)iPg#ecKYnX@P<0AvbLd;3z4ydsu z3=my%6%(DQbxd`YUPT1Tx=3XqU$GU$*QYHa>bubC(qBBo_`p|FBS{@y46;oi7K z?^8@yg`#hjQrW{W5xTr{h_zMB)g@0L`35+1vuO zk-%`I1GSEE&;M5P4B=eG(~<4Y+#8fAt<^lools;o_>4OQR1t{7TLzFJn}$;LT&!R}*uMJF|$awFY!Ems#!1w+HzeLAh}Ul|a1t(OpqB zx{XAANvybBG>pFPxuhA^lM9~LXW<7N!UuQf>~GrFQ;KnzP$f_8UV`x`6wT})69%>7 zRtkNQtBQ=JinWuPRKOl&CS>(%aD!CZz@%SVaX-|y0hkdqv?MO1lHWn6jv{i*u(?k< z4)rVlCuYTu3Sj~37+g^f>LlMO`_xI9mi%9}n$1w{w@0}mh{a&JAMe~S0hy6YMS2~Z zOFU%z%(23B^!d5vlMoD~{!t~9n5la(N~zOg0OVu~LUv;?8bj%dz#Ge;CqjYib8eZ9 z&;;*lp3KM%=mpLg#<>JcPOV<@P-)p+(nL~VSvEVWq{Ao?L-w@ zuqIhs`d4D8uw2$QR3}c8J(~KHhR$_?(!GT!0b)3hjsvNV?0Z?;$Usmr3c1^luRHbT z@j7-pcf>StPX<5Kj2X3|zv$LE@j>PV{!~9SJknwf3o4Enriz)Ewl2n#Cd?uacp6YT`EOA~Gf#i31cix6t5yjP zyzcLJAp;=?33T1@XQNE5sq#%P>`&y=rbKk;4@eRfSVKx%H&b%d1^oAlq~l%d3$J1< z9V-&EF@jTfNaq7UAW(yRp{FQXp5%>(-VsFmr1D_$HrgM$uc!duTI^lhMqUp^u`E3-5Bq#z#{h?d?xg`d5_#2MP8b@jA z*DoKD0tBRAY{Y-aHsc;DA+qD|&VjL@CxY;)LfK$8aDXB2GlK>B9p{xkss}R}%U7CG zlPp6$>9rcXfs_}x+0X6i?{x2iMNyZ@Ea8Ddx!ZG-*aOkSbScixkRCz2@%lM}$|dK2obgRZqkD z0p|V(^qrA7joXW-F_8#TNeRpnt7rUSq;WfM;gnD_k7MRID%F6ss^tM}Jw-inLSPa@ zqy>D;;o$M>kOUm_H-`A*ft6~{OlHaT>aFgunl0*?4}Y@J)|cEl^rjLa48f4C~`jSqug zBsP!wXDcrL1+z#-2PTNP^iaA#`U%j+c7@kX>uZy+yZS(L#U{Uw*?;EePt`1;-$A=V z;YGap7fdHEgkDj-J35+eS8->JJK!bjsG8VV%r7R)h!a&W(*NGu-A|*h)nN&Fn}K1Y ze;W8Qqy2%eu$EwUP(4>oCp&cpQ^23kG!e-l9smOt$cGWTP$T#`KsFI*$+@^6S8M%h zKPwbi9@^aTzUC{RmNFhh(dHI}J`HcAlHD?E36_LnR=2Ba4d?q44cq ziu04ok+)Ten)YB))!~`gS;%nw!Ox|&4^o=?7N;5&o2CKpjSHsFT=YPWC`Sniadh1(NKgTltgoaUBMVVFVlSATow%Hh4j{Uz zP+NGAIfD+UNuXU(_-ukR7el6E`*#gcZ=ydm;cm=Is!0!k5Np-nIFb7%#atOhr&EW0 zeLLh_-h~Xns1_UO`#wQ1UBNM0V)WA^%@NU;iWVBRGIXnv?{L}+@(zAgh3=Oka64MD z?RR$@v6}@4B$vK8UJ?>r3^xBEB3+LSI&lPHc@D>ZU0FLLBijNTTC9R94yQ`N30~dg z=O>Q;S{o`%btYu3XUNGZuew${>rdSj{p~cCJU8?&0F^`AyJbn2X{@WDbhV@xX-ePoLs3bL5+);N z3!s1d%1|?EhzxXx{<&53VXluBmdhLq&wk~S-DVVw*Bl@w=@pUSRfu)O+L`bys5E>G z+`3^gm3mwGDElQhBngyr&T>4LHO!O2AfE3%qa zQ|qW38h~_{k+3L-2s_F3fkrZFS{4b+Rw`rp*;NO?Gbn}4<( z>Lye3XfN2UO(|(?l}ZAK_o5w&`zcliNbSudOED1;>#{JdYSv`HrX{Eq4uBTN?Hfek z$WDpgSuoP13;m9S+8nhx(?Mq)WtnlUEM51btu7f)PfD#$79ITU^{lb+W|X-w%HK|m z_D(^NAphEdjrF`R6Em=vSft?;Y*V+3C9qO(qDqVB3L2u}LWTKn+8Wb?=Ffw?(&%2| zan_qd@c=5~W6~;+mX7|&2i>vG9JUEZe***k|HpQ;jP9J(8&E z9l^y0B~jv%=u0Sl+k;6Knnv6Ax0snsQE^HI;htOsLFdfd!wJ&i0pMGiPnm^#s zp&-`<8ivE~@D1T9K1?p%Gkg4rB6kmdz~WQ#Z)4KBMP3MmJVj~?y*eoqEuR|XU^OA3 z1v4m^No+j*cq&}4)4acnJGfk^WNje}ELKEM)W-^G98?p>($ES_Cs*|}t}u{&;+j5- zQK7psvH6BV4a#!38svxx+|$^@5R`$z3eZEIVn$GVG+AF~GjLdxMY-9I-42#r@0o|3 zUuib+C#+jR5?ZZ4UISkf>svgEG6Yn>q)VhUx3R+_E`-RwnmOa zn%eTgBhEnBzS3j3iT1q1zORH%7nCoNO8TA+e}JUhRrTH>Sm}3UP~mUt)j2zkE>Rc- zBkUco{JOc`-f7F#?mnyemKYIBUn@B=MJWL56z^oXZGw{M|Br>wuW)>GS300RqLmF!PJEOxEGkTs2r=YZ1s 
z!1E`&@~W=owv{Nv-oe`xHu|wU@-tOlbq{htDWcGao-O6QIa&wV`%lopIIEoL7ERaV zpi`|?$E;{g3RPUKje10N>nP->#TtS<1zQ1Rcfn8<-PwS7aOr2M?mn(wW96ietn7gc z0SK7F^PMeXlkL`(#)O?1vD}J(C++Lw-qPOs=TJ+1p%Azt-#A*gyTGS3O+Ilv^i}iw z%Du*tvL|(R9d5+Hn9F;qfm-rFu=?z8Mk}chMS?nzq4xbOJ3^par!0kkJaMbjt@;R; z=6>J}6{M}{B9kTqDiH_yy|TvKM;nZoNtVo`bhnE6Sjc=beCi4fJ0UimS~z$f6R%-0 zSJs$~bJ;<-qnOj6sGmb`DyD3Q^ZJRWp@`Hf3Sni-uM*JGZY2nKHno2}z!Y?t8|e8V zm-;pRZCLuV00BED(4vPUe#435MsAmc`Hwz8fLwKx8V=H2>f4{P8Rdc`FtA13KAkB@ zF%5y700`}T4aXfZls`rD++t0tg?403Phu+ECDIc0Jk{3J+VrZZMSR3!#tt+%j#(UP z2Z74=1Kue{V<}lr5ARJv48y273}%N@FZAQ7k=oy?!$=70ijA@NBQxYnLbn-%l+Q@c z%JbB2$7$`v<*;YjSNz+9Ke7Fy?~Q@DMY5v|5bH_K(j=m(E}fMMQoO{L#81!46#1|}SG zFL%R?;ammu_b58Fo=NVCp@G)iKcWoY4c8X~sibNP7cpFQwuiS;AE0H_p!JeIGwA)t z4!y-{N1vdz9O?*KLG79l^eakr6Z;DK=oyRVoY;9HUsg^q<*Y*)cxlE5iS;f-Vs7^s z|Jp!O@7-9U)BgUur~6ZK!K_)UsBfL5{}_}`C%usa+qMI~LpbX}1)ZXD+EA-W@5Yub z+n7BiXBy$9cMazGrHo3ET>nQoF4Goetw*RtOk2-N;)`M!zzGLP^ceLHJlugd-7ac7o92ixn=vFM6*h-AR)UFQz%hYB4xN&CX0#Z@?NM?HZfGCq9rs?ihGT#*o5&&3r^h-+4 zl_S_K_7#>AWq~eyGl|@}Uxp-a z<`Ug!(p*HJWGoG4P?54U>&1|{pd;>Nyf=TzZE6UH>@=Q3&(uxD zv(nXEJ+ybUhB)QRPLkH^ID)6LRB54C4uitJXsdZ*fL37Aq`aDxS28P$;!hemsCUJ9 zCid>jUFNEoNSZHWN?C^5XwLbIaar>%BSI5?`BHnE;CatI0-RV_zUqN!M+VnC{pJ** zzc+HuZa#8ZE(OvD`<|Jsd0Khv^Pa6(jZGLfMoy))p)02$OX{=wZ2#4bpzkYtOY-4zpXK zG(Ll&t84s7i$Zr^MUX~I&+&54kP2e$>;@1oPK0uQu>~ za#Xk`wG^;g>6CtZ3B_*Bn!#x#vP~%qqAgV0IeP%hJ|;8Q9Y)Cx(u1VP5h93n4VC(b+!KU#TL{gVItJgu?))FXbx24*^9Rk8R6V~fCqBXXwzocpZ183=P z^c)M@iIsu4BEu+{xAsV@_2&n;Fnee+yK)5^^+ie6I2-xip;&BsDDHUZdXZ!fG-kO2$4QG8pd8~EJ+7z6uk^t!|fH`3;M+=`6Sjef&1?JIm zjgvEVCrCVH{d&6O38%(~)&aJBL?YAuAk zJ_B_AsWqBlm}eJA=vH5lwfqZ9c5D98iq#gAu)0aNFFfiS{64BdH&meBb0$1IAE+34 z_=X-&XSgBq2RC!PhsWH51kAn^4`ce@162Cz#FQ@H4$ZP94}H84 zdB-+m9B-{hQ3?9d<++qL`zUtm*s!k4MUx3y-j zU^LgHId&rlP67V3AriJISA|!eTrzH=IP9sZ2lrb9vGV}r5Bpf5h#AxZwK5j3ylVt< zmzdO!fJ^$-EyTw=OFLHtb(< z1?dG|uM&S`v3Puz$BS&cYLv7`wFPCh!t2CpM}N)X-6{}wspUUr066rF=|!kHe#AO5Sj^8EX@E(+Y^L9>-KT+-mwkcNCiMuJBo%WH z)24Fe3A#25X#PPuO|C+_q*e)q6urR@8&sQW{h~KOQtzoifCs<-?i(^E9YD6Joo+zS zrXgNt7q;*A)ItLn1YpRM^$OKgeR01^oQ%2%6_;vJ{X>3IQ*S&?bza7Uy*qRa%Rk z77_+Ln9X?%ERUN$t^C!|5%Y4Jh4C0S#_dMv2jmiO#)}wMdGf$jmp7qY^>4dh+9Gm~ zrM0HJP!|N*?4Bfbv!x_BiBj+=iWQ1#=O1Hn%RGC}LKY=R1}g+*Rxxts@VDaU+y=fU zJ{Se$v#PRG%knO=lb}#jwpyv9{Om9(PKF|E{G)F-%`SYD>-zp~kG9PI9ay-RNc^y* zW*j1}N^D_7sKR%3grj7S=4*xVO&u8>Arg`NW#Qq>iro;YE2h zZZd&Nx4<^ny}pCVx?2hOtxC%7w*43OKk&x?$;|x^zUz`9IGfEsUhcPl-+uw$)$6~( ztN%BA*Z<(sQ(ImPLBUqeMkRY-`;;H`zI>hbmsq8)c1dL5Kl93s4F0> z@&8DILpu1$TZ=PLVp1Y1MM?9JwZQ>&!~$I8ZA^Tc3t6hS$@+?3T_5kitPB2E zRF#v5)&Di?Xa7CxR~e?MhFAZH%oAWx745Es-PD4QjWgB${+9a8jKlv~CVP85z6bwH zz`i>cmvaAt)%&jr`2Sue7ynbhIR?p@|C^v81&0IS?DoxqLJ-jY{p0_^hW?TF+N1NI zZ0P?{=v7Kr;CBB3oQD7d6a9Y$HM@QLKV{td$GpO@7~0sf#Rso806OvItW0r@7wlA{kVyshhP?Nt9CFT3g++CXjf4#sQc z^}7yAUAtfTkNK}Xe|t`QPJN+ZGxy(JcGtdsimaVNd~WZjuo~2&cC4-jwu^Xfl+4yO za0Rp%wH+Lf>0X$c>=?c3e!g@F0{Kx_r8P?%oH{>qoB>HGe_esI90q$-8B`9bRF2Io z(gpT%n(^g8w+QP5_j<>C|GpA0f=F~BW%uGgIQDL?Oq6yz;5lsT(-$5Z-jdSwr#*LE z9^#ig$YunyF@}aV8720j+t}#4A3K$q;$}$yCaP&3leyAzD+~S*gm3`rY(kpun-~Sn zpQg0!G-{m!LYoO`^7bLLqtsmLg)!(YNKIC4@LlAa{o!IB@zZC6h2U6T+TMd-aV6RU}Z`v z=M7^%UzMQ?kf|uFRlVcIm*_6RT4HyR6M@NEBg_$}p4xvCh>7YEp*TXWkS@fHX|RJ6 zG#8ESa{SKkp_w3gNi_VSz9~hjn1e1v&vai`%KQrLI~viuE~{HcsdXqCAF%GekBQcG z=K>eHWzTb}vVgU12~6nMb+*gjCleTJif=6nn%s#k{?KO-rEFKFfrUXf`pH zn{P^Tf5G9Nx~EtOM?5a5q!ev*pz2%~@`06UvyhtMJ!#CLC1`wyQx+c5qBh+8a$jt8 z>fPZUy0mK)%+qWy~57 zOOH&^6)JRs;5nzilkD@Cs9|^Ljb$&$baT?{=YuQolv1gSZ0LecNLwg~vPKR_ zdOwk%M4gNita^8xSAkeBL*IPuMy$Y{W79 ziC&bi^WATzDJ_!`S3nXhe~_q?eHLq7R=;rPA0N54>zoinnVyneV>kDGhiLmfN`-Vo 
[... base85-encoded zip contents omitted ...]

literal 0
HcmV?d00001

diff --git a/core/src/test/resources/indices/bwc/repo-2.3.3.zip b/core/src/test/resources/indices/bwc/repo-2.3.3.zip
new file mode 100644
index 0000000000000000000000000000000000000000..b94020ee80082bd9d56a3c20a392d17135ba88e1
GIT binary patch
literal 75600
[... base85-encoded zip contents omitted ...]
zH}%FTEb4lIW9_g8L*3vUzfjV z$Hpm&M1K{0;jIvv3ZeB;@x8N)B(UqXl#f>aHE0t3IHT6il=w4eCfI5gMj6{trV14j zZBl~ITBu%$UT#``TMZE$*xs53w+LTRif;DY1P2Hjav z98qxmzS=GP2JyICtxDG^Q3@j;$%of{y1UU(&`2S8s5!dsD5J_zwu}TYSb{N&P(*)l zA`B&B<)GdO6XqIJ9fO3Tzwr{|Q_b)(Ms}V?j5c5DXs8(2JzRmE@M0!N?zSDml3yq@ z)+~WQV4k;7Io3wX)vuRZj>|5bmPGI&4DHvvq&*dS^I|V{l8GhEkuV@FMyxYp-KJes zc})#bfnK~&+r)X={SyL=&;P6Z9&bwm(Aqv*L8`#TlpU&IS{?NAmlGm#;^^r&UpN|?T-yR(j+AIB(SI{VVMD@DHxh$ll|bj7je59)rG8ZIAM zS@dy%iFM0AN==<#3-tXA7MFCIj|V{f=ww&I!U< zN&+bJ6t_QaAWM^Va>PI%jq) z$r_H0R#T!cfZt6abT_~Fo#y+AmRcLM?u)#oG-#1l29!?-I41mL#N4`jqzJ%%hd$lPQ+XKq% zob*Wbh_OB(K|Ti5iwAYgOo|dZ&M%Kujt$7Q@d_hz;+7YG!tCW_!3|203#k=od2z=& z`9YPi#SKVyg(Ie5XPoXYdV?<-Z$Yn=c13Fr!i=S9pSX+Dp0fDVki4~DUrJ03#^`)1 z4Q7M6KAq_jSQfC%%T3$^XvL?jc$~MK8(mW#=+&uB+jo`j7|kp*PfVxMoTSMQB+-r0 zdlOo7M9lI!)gJ$_SHRn_GSxt?Qr9W};H;la=)o&;G?mVpz&B>v$QdK?QBYJI2)J=s zO(uttD*=#I5f;kiKLILbd2B)hYl~vhgo8SokGOS{Hj1;#HNOz~@Z!u_zObFpzzwnC_8g+r6Vn*3lnF%Wh|eO#4Rg%*9a_uP9&zwxkiZswrV~3FQMfdCCd-U z9R^(t^EZ{U2~Pb~!;5eKZKub2qk=G%yTb1A+|2ohg ziy5CH9wHYYV)B-}{Y_$ZT0zS7XExZw@k9}g$ z$C*l-&Y59bB0GgB>&|1}((@kAa=lWBM4Y^lt3iE<+!$4}F*tWWl?q#)Wn7bpbfRYe zyE&qHg;xHnO7XH1zpX!?8Q{0K6ILtW6bt|& zg+7`gvnF|>s7s|Pe%J{B$RmyEg3ZsW&&xLz*v-ZCxzZbPXqz0>f13IzA;V^8HSlB6 zu8mp_QTIv3UXSL}EOTT7D=e;qIceQ=J~xMC9f_+>b|B92GFd)~YGjCsu;L|RkrYVh zSIs^|Vivn~p9ap05cHSwN-U#f&R}mu&p7uv#NO-r<}fNSR7i5pGZP??lQx+uA+)#Z z7bJk;)_rSPPBQZ!W;wmRSs^VlJGkWS#^T>EDCz|YE8rXJ;>vSfUc#73_#+~^K<%ks zFIugEp!2H~DBqc-{`gDteLgbA&?zA+yt|WLj_QvgQq3{VbgpipGVYYwd?*kyJ8%9# zD$0O@q5VV0LIJ_KHb_yV4l${K0RbVw{}=RD?*Bw@omQ8#!4*f#;pFYyj?QKa*U4BY zVvivqZiYwLPHIcg3DrbSrqWhL4UXII*#2X-=U%wQ?j(fF?Y}F5%*h}n6--Jg4^>`` z6zU|R3UUOb%qV_zqpYTe*f-^F(fA$dMWqbTzR65~>GAmA-{Vb%=0QjUo22Rv_FV0T z6~L9m)>LX2O2O=OZNb^my>gUy!`sK~MkWo9WOHfov-qqlopZa@vaDPCq36j($#RkU zoLNY~a{=q4_=dyPS*)Mv_rr|r0d5GkUypg}4i#65T3r^q2GO2;5izqH^YB#BQ6zW; z1!-K-M%wiJJnhZnxtjgsW%ov<#zlA@|K&1n=T4qooHwgA+`6o+=E82! 
zT`W+M)xbR!cLuqI@-;NTA8$_rM$<77a}IP^FW&l$X%OW!)=%(nHKd^(NM*WZtM~$I z&a4J<%9*^?u5@Bb1fLQ6A1 z$bI+M5{tn>XPf!a(!9==jkf5YM)$}g5C0SF#v`q0<`a^#CIp7^rpAES0-+@PH3>@K zShcYeCv+GaV=HK0TZ*VOjW1by*x_%+4%);^XJP-g=NrRCK|d?cbyjUnhl03=Hb5ms zgDBmJ$_?8F-@ck+@5DAk1ZuYRB1iBJXr08ggzVPdVst;bea7-5L@KRg)h!Mf=1ym= zA_Sd1R#hii!B2J@dXP8H?x7ocGLxgmA%Cy$cM0v8KNi zF4d=Kr{@R|^AnM8OtIkEgR2cXxj?^m zc7w)43IB@?soLBCN6t=^D~p{nSPzCdxwMzSsk29@ChI0!2F_bknzM~KW2;KDOz}91 z48sS$0E*2OO*lU&%nJEit-u?6Hx_3_sCUpK!!YiJzQ0pkYzfXnXV_?+6@M;-U#jWeh~j>s$v%C zpBes_xFUi7i&SOwpHkKT;a*?=U+(p8R<2H*K}PC+hK6!dy7tunPrt?mMU2$f*Mb2B zI~D$I{q3HHeFx!5o(`^fCkr(^HeHaKq6~bLBt$N{n<49;p*-P~e@~g$#2=9l9-fTk zXzp6(Yk>Ba^l4h=4W-%xJFTSnkbWa!jp1^t?EFTQJ7EM zFF;5`2n<6|CU?1m9QfNqQ5eAwYP#Xbf8-ET5gcw7Y`6vZ|DkZJ{o4pff&c>Ill?D( zWBi|jbEU&;hpL43htCo8F$`Z(LS%`ESV?J4Nm&dzA~-N9m<7cv&jk3=KPLm;-!f0k z7!}ND^kJL7*Fsxi@0FGH#&?GMWjlSmu(tMQ+w1xH_3-j=Z~lQ3Gqu#BmO(!&S3f7q zz)xG(-auf>_xAC1JbMsPqqoU^H6)v zt-i%r+41pwWJuiV)Zwf4$l8K`%_O+kAXt94c@?5p$#K9cK2N(>`n(2ohM7Q7cP(Vx z0#HCOxGJ{>>eok|gblwkxOB6-_mAy&z;rb{$+!j;Ad2W({x+Htc~dSgH-~M7A#jV)`8ycpV%UPNNhYZ#o&|&)YbBtln=@ z%^m-Y%?NZzZmaV|A0JJ^6$nRcrVC=O)~;Fh81Z-CfF~*EtqTbO*apHU+|*P)s@WhD zQM;+3h{fpXF_wtGz|QbjH5iG9t`V}D*Lma|485$`5+{;*cz_~zCo%Rd%VddHN())aS%sa*w~>vf&*@i&Uy!TI?TKh~cp9Yrl7R{!k@X6Y~%<6;%&EQ5}3paMDK z#bXbS!6ECa=bm+~tz<47-5ic2^Qk-8H*61Wermy|6z76ipK2c!O%-9L;kM4lJ~V(t zOR=@0i4?KO`RG2V)sNe;b(I}WTeO}>w?D!+#H-h9&KU`vlxOR&&TtDyMaCMNH zBc?l|6*7Q`@m;zZ`n^WOFuSR?rM+z8=&S5S)!X&r{AhSNiualt_nyH^pltDW+2a38lmaB$tt_ z&0mqZ9G2@Y+Z{%=2xa_K=$9t8imLZ&r1H1i7SM%h3UEh|A0_MI?dU`$&ZY&lq~eMo zxUCS{pL$PlE6VGj$b4o3<1*wZSyKC;+`GJ-&|^Jp%z_clcY({qzf9nW zrucZHEvxMUpohxKRGry=lQIhznYoe>AE}|!6|0$9xTVe{jykA?b*R(uvOy|K5R;W# zIhVLh$Khik7SESK=R1P1qa8j;`teJm_dQa3Z#I`TZQCLOSR%CF#KLLhC&h1 zBo!}Is{DJUG*%VKqo`}`!#5wzU59I(N~|`7YgU^oJo!Se&Ws)~A>BYA!Oo zOe9=&!t^!api4W|OlFIg0%1KKczQo$un;|v)A5qgL$@i*vY}*ot#1l1{yu`co+cZ) z8K}bJ0;3$telKhWXR7j421^W6svTAC;40^z@Xim~R$7?HhZL=IST{1b$=phLViL{@ z5lt`BF^^04cYv9BQgZ-Izv{s5+G@(@;wT)ps34p-LS}0E=CYwwgOV{V>Cpj7=sipS zhiEK_DiAVw_DM(7TY|b#x0@L8y4?7%NuM8#bkFRD?m>#yJg{SW0--w##9IY=NUX4Lj>5sX9tjPOk@*5G$v1;BJiW+AYlAnfd2_}3eI$jkivf<;O znb%+RDy!titG?)b(PZ}~op`S?>;wgSE!kLIU;c#!NF~hONy399kr<1c$EqyO+P#E+ zHmOC?TRKVY3+&aOP=u9r@yg_ogHy>I_%0oUoY7SNzcQT!S){E+V0f7te3wSKkiJa71Z^juU7Kb|Vx60`kEB)RHixO~ZcGk1MC9OSjpn?ioBXV7t% zs3XkzP@#bm+52j)$*?54;$!p^Fi0`3f;Mj1FWdZ<7N4Lc4O=&<^ik;zMFvBD#-GSA zfrt;?0G$AtFg#T@m|!}%%P;U3h_e;_X9q$Cxm<$)*UTE<*{RQ?h9HK2DrAQ|W$q4! 
znd+P%HyMdvEX(Z=x0Q_Xm9w>opsd32BAQwSU({u0tiSIvgM^{d>DiHq^fw6|y&WUd zjNMEeA_C;m%6LdrDlnwrNp))uUTfZ^dA^BqX(o3zB|vn(1g%4uJEd<)%Z4<8M9`@r z$$^zTuPce@`kPXFfySwYyg?0-umPh|T_qPS2JnM#fW=!9$sd}Tgp36ug0IVquf5Xp zhaE4c8(+`|R?D0Jy3_A}{zPE{Pk1390RhpV{uk$G`k&74D(&n7EL3aM!mXy;{Kmxm zIjy^PZEY*P`^M{eJDcCa{DwA98O~UR>5N!Al0Ld#Q!Y#rR-}54MkG=UWL>$K1I4?P zoLx*M|A{L3b)Z7b&mQI**>^`^wvy2x=j4Gey7$e0ki+BpdrCntand!n+rF02{W?AO zN?*_&Jm;~x){6DByD&)qYfSLx3R3{Y+IQ1KUNEN{^r!Zx(gsAa_5RLSXC_ub$7a7@ z-OV)Ro|y*D5gn}fTo_m<+eVm-vTj_KQBOpUuc-x1N1Ow^6`FtG4dI$sdvIns+UnA{ zItsTI;iQAyJvtz!OM20zi9J8gf$1>9lUWt~y^Ybf5$@VoR9B!u4jCf2fC8XyHC5Ay zWvKqpLpm(S{bO#%Sp7;y_+ldEDa0i)*n^3*a@X8i!9%HQP3CkTY$b8!`}?x!97o1- zDFVnX6P!=bGh2FibJut;&d3`ZwAnd8=1%c3>cesidIvu>^_ZK{0|Vb;Mde|}Eg9L9 zC`P3G$Qm}^ir{%hPDrLTbJn7*uw^!@1xa|NWbv!yvYFeWY=3BR!nk?j3^W*Ppa7th z4`n~Suf+<`m&UO>i`H^t60KVdb-yY{0O&AXW{7=B)vI=xtb7x2u|qVV-H+&9QC!Mrmtu^!GU_cZk{1rDIOatuZGriX2;2VOVNIyzg!XnjzVR z-hd%p60zd%kGli26-Y!qsAA%VBuD*fx3{xzw{cwV2`G6#U8wNFcOf$=>+Cek(428~ zul%rdWw34kjUUpHlbl@1$k2%HNWaUU2g@X9LvkyTm@i$S5a;l3}rCYVmN7#@-`!uou= zdZA|uOzozSF_h%a;4=yQDq#639(4rQ^RnhSzHK_}r$8IVlC?_)mg@&hmO9r5`m#Z4 zj&2v@Yr!Q`K_Kd^ZDCI{+-udb>EgXu)!r0s!ek_-;yx+%)ZI~3<;vP29A$zKSB7f0 zD&&b5mxU7zh3AJThnn{w4+(nu$*YUNaW-^%;m$l%TX#oZ`{dPaYif)IEbCee!j-4& z$A;|9{tTW=WxHj5JH*w01aU(lZkf1vXT(t#RZfV)_?9v0N}eFwH@O#NOua#YL?te1 zrv?%GdMhGK+Lw6Som5IJ^LgrK=um@cdKZoQUr}!3R^BZ^Hs;Z`j7RX8X_rgr9Gx7y zl~xzVu4i(u_8!)l$Bx6u^T{37n(C=st9Ac;Jz#lpkyp=77bw^>2Dg*@?`aSkw=N_> zhqjtdwJHwDCme$ra3xKb*l9uA&4tVHkMK(p9{Sy-#`s0{Oj^TX$_}g|7%p*}rXig@6J|ewrjm~rf$dVNOC5>{%t|xwi3690f#?UASYTCV7D=nfK*z2Jex2IE zPp821Lp5P{qNktef|uH^U*ik^8zzK?EX7Q~ zLqerx_Ue;p1dIV?CUNpw^*%%+C0tP_j@|Y!U2m-C>4_J#@WdkCpR==p3qRMmR3ukY z)yTJdTk&@_e5@S1Z}C-xdC5UuG8hrYtphYj@Ns_+Qk6Tl*01AiAa-pup&Fj35oopk zDs7w(j-_N9t`B(2Hnh!j@HkenNjPdRHNoMwZvuRzg*HjWDydN<VM?0o5CQEu+wdSvKFh6fw3m^~0DvkuF{i1L0ap3KYtg2R_ z)v+{R*Zz3xldlfui3pid+?5(Y>Vl)Hpc%Q=Yu=l~SeQ@y%Cn_O*nWlM3|@~6Pq3`K zb84Aa5EoBX7QSl0+rRyrl()sN`!~x?hqnL@!ogt~@$wFA^v@TsA6Hr50&2?U?woGz z>UISg>%88oFJ*~}@pP5juUy7EYisL1X|m!Nd}2tc&tPsKc1 z#{-8Q3O2Im<`s>wiiS;_T^^7DmyuFVSl{8RqV0_H0(CLr%1Ya0pXVpO<)kZHqnOx~ z7Cr+ixqqen4NmnnsQFmCGIpFDjnaM5CANBkOjxgKko^_D1+P4qySexXoFOQ6ZcQlf zpsB-H79#K_YjA*(zhr!#D#L1{DGX)z^ww#S(v=(P(LzXpK5p6q4OU1_2}$G~6f@G5 zx}$9@%&Kd-JpnS;s&|Cuy!I?YMHQF#e&jK;9=5WEq?|6Cv3%r+2|8Rps&r({B#Et} za;moMUOH&3lULP~)r2hwNZ}-;8*epiBeRj#UVjNvSFCd>?Pbl|m9L3B!zgIZwC64% z9HJUlB=`iU=X_OK6@jz(frwdeKJdGjMGh#}PsfcU`Uk=|V+9v;JJT7f3Ci+d;RYv6 z(@3``P9t?|hRMjHbKn;%o6=pl`)db04X5L5c0TTd@o07?v}||#00sET+diL{XIEZg zlm~3IClS5E9WAToxxi`0!nM@nU;P0~tBpuRA%d+0%hld{I#sjdh6bvb=OLBlLz&n! 
zLp3zH6z0s+7Gum4P#T=MQrqj$FX)uxT0@C?#LTLSOynLfa+XR8${Z!TBW)BwV_T0q zA}t~BLgiQ#C~w_H(OuU#_^cdK^E_v2=P|~Z6?*nOGq_SivbCCSB{MCZ!!+czR}s2( zh%OR5hYrpefsBeTSbDI$Tb_Dw!gP?83;X6?4#}weXcE)3DtCNhhYG5ySZ>(aUTaac zm6s&xktq-t;n&I9+n7Tk8F##%&4I{G2PyB7+<4&MR4IYLn-R$6OLwXirzh)5&4|{B z9tq3S^p}nUR19ZvNN;aG0TzPc9tnztXrZ-nx!ZW5F(O?+#&d_IO5qCqMak;){O}+RsbNdPR^O%BhAM-O2n6ds&Xpk#iIxm2@TO;r;Hur)zOk`n=;@^b} z0u?z^UlO-g-D5g#5X9lZy43GY9F^o}DODFOFL z+4AplxvANL1;k>>OdRMm&PAr>SH(kHDYJ)96+>0Z4l-u;Mm@-MMqgSJBJhUJWa*}f z54=fkZZDR**;o6`s_^z}qgx4_X3gBT%p?#6SKZdp`fsI?rILd#iR^>s#on=6PfIo8t>hE_#K+8d#tw z&CR8?4HeCsbmZF4#oSSUFwal}-}n_TqFQ~!yP zns*}KTCpZb@iwQXr&rbXF?C#|9xAf~>6{_3Q5@Phfyj(@<>F%7E74y);)TbrZzf1c z4E@qXxW3xM{V-IH7ZEw4=!kJlG2%1J4X8$M^&QH{+Y0*fG-;u1H3i1MZcz!&F7Nvy zsYlf3S*E*yXBm_`HP$I(uZg@qX89O4v>o|doxN6*fX$WW5MhnG*N_1^VJ9t4u_|I| zsH*W+APx-jVnoLvwb>*!>%{BMMD9djGTokI1sO=n-yf zs$9CUnjTNL`Iyi2&qm)sTP<3|45*7n&Ou>Z+=-wI8afM7YZtt-)0L+^ zfk93%QHJVR&zTOIg45-P&dh6&=4*`)9B?^O0O2vUa_R#yZ!I^1)u6J{6BDAgn8zF2 z0G|-iSvxn!xD3CT&WNhQcp5f9?gaNlMZ&9!XgE#Hy-hqNrsKoWZGvPk^ngrUck0U_ ztkVqN8VQ<}|MwIjD6oHCo3d{b2cKz(wmlCXYy6QMlrNv-(+DS*-ih^Z9p)TsiBZrY znIqAakvA1&<9;O{HxHAVXAlMNI~m5GnU7qLjoWX;-gk{1Zp9tnhn=1TX5WwET3+)$ zom`La$lM=@?~#wFACsSi&zV|v14N&Ppn&Y-Qs0kMje3Qj`J3L*ZT+|X3kc`iTES0T z#?*$NJNf(G?}vYG7c}yM@2R;T!?T!v525>|`ah3unFqb@H`(?OJ04e}H(#gdpZ6m# zq6Q!1e(${RsJZ{bt zxbBOOZMAzq$%B7;WWd->)?M>iceIvU|8-3%_}lMmt5$5=_h7o!;2S__KQGAF_15lI zjQBMqI5zu~Tk3c57P&v!+B&8oc;VAc0ZsZq zK#Uv-L9i-@aSKrABFnMUhARAavie`0oe4OU{}#u`z9q6R6Cq2s!C1>KVeFKhv5qA> z$-a#xOJVGZxY;Gi5?MlINwTX@*|VhVd+w{-|6bGoKFq!IJTLF#;e6iTIj=MCIp5!T z#;mxgGl&ahzZZw!gutqz&Aw^NrsBu)!t3m06_hBIihY)PZn~ho0OuAJdy4h!oFF4T z-H(sa}z3iujoIk-7Kvr`FdpTHRS~gaZ#G zJe>3E5=MGf9dbrQpWHRS;5IP4uJNvhkHPc+_I;uMYrSOML6MigM&MFVqZBXzI=r5z z`9Az95leu_k{3%lr|=7dGRg4Iv^6SfQT3*%L(QmtZwJYXZ{$-P2RtG+OsC)EhVnq; z%aYh@utz5F$HI(V8GR?tj)nX65^POit-k`DX;JXTjk6%kYSN{Pu>AqPmTkGxV@(w| zDJU7yz9OF0MCnbTIeAGkykJE*s~PG|^KMc?GIDN3(yWQZ`y6DFy*?~;MaZm~hq&@n zBuB|QcLS`f|K>VF!?m)$8!uq`^V|&@Wdn=rmmB04S9lT&}@CYbegK)i1I=s){e=3<#TvNT#hpY{-*?*`1uNiC5gsdv>#bbUKF+ z(S-`YJ**BKr-st@q)`5n8|Qz^jpXrg6I6Pt8Q3hz5M@LaZ_xR7!I6hn?N)s=*4<7B zUAAoQ)l|km?lK9hSd&&vl~B7=FAZ}gl;B7SHG<(CDWUV^$CXfye<`8*DcCdX^6aJB z!gil<_+@S_h9cwEUK|H+~7B4F3{B z9W4E@sbfLDGGyEx&Nx+WBwT8+3*cokt>i1ck0X5_+TwNR;xHt_Wj~w|f`^k}p{E|d zamz(zz&8e%gw6g`-z@wN+fph#$#^Nl2$hC1Kz00=kl|c;6+~z^VICYxOb+bD0hC&S zgv_maJ)A#c$M$pZ^*(TIm6&7*hf}X_`Y#nbCH;QlWO`}}{3uC!|B)o03Aq9vyE&Ii z!(M=GL;q6&3+JZ4ij5pM8zmcw+8ssF3sr=Uda{9TDhM;*a^pa4XUixvhP26GLrwzS zS=2*tN}BHdXwyt0{;6oyfqo$#I=2iQ-6jh7{q&aBR>IrDlnMM7t)WW(h7Q_ph(IQ& zK>e2p%0)=%@~lQDM7>J|FTz)wh=h`DF`Lx9=fQrx=ToR5K?T$hwYui<9>0R*L$qpi z@a}v1w4y9M!%0dHcX5M47XeLw24zuch@eN!4x$2m5Ru0j`Oxl>01mFe5l&o8aT)Vw=}--!9MR zm|oBG&?_{5>jN7w+Y^PgnWa!Z?a$1WX4S!hK(FyfXprbX>D6l%s{2FrPjD6`$4w zrw<%-U~l@>-rZ?qH~D4IOSe4bS@*W-xY(|Y$STQ}j{Bf4F7RUOvtN z%NW)`ssd*x&D`@Hp%&gBA{D|;B7+gz2GfIN<4x}Oz=QqqgFZx$dPmF!-JdfBYzEU@ zka_2EW0JT0(i-^9+T+S==O;kopYS1~xJ8#lB+Qb+?hZ_+4Xtlqzc-XnElzDi7)-nqT@O;^vv4B1oi#=fhYyGHRQsr#&5F&Lus-iL_O zx~N|nQOxW`&HKTLl>OyE~M&{(nOe}$n zeXVFrW~(8M!_WB(3+r&+G0ze7+}~UAS)>m{Stg_>7f5sN3v)DO)zAk|zP)x<-s$R2&uG2NRb3F8)ciwr^g;tCG!CApJ*xhY7V7Fur13%@(ZwjN`k7k! 
ziuDg{>LIs_#fy8mEhBBc55`Nrd}oqZEya}5X5CKe2LvmEh<%O@_-T67`VJ3CdjVS zU$}c6LHy7w*|qn{T3~)ctH$axmfI0N$W3?3?rj02ef~4KTZ^`#g6&`?giVRt!$8he zEy+EpW(8VJTqx3|KuBRKP;t;9+FOtZS}MF3Jf>r-yj$OT7fVC%_LK8cle2zU5(dc+ zUwU8Zr{I>Uyilak%h#WneW|}x>!q-y&xGQ0&VX#~HDp>v%;*6h(C&uCA{8 zuAe^yO(X{Wk&XH~n$mcIY{Um-Bkq&4k%zN|E1##0hbLT+-`c|S$ZO}*ZR@-3N1wK50m+0_K9x2w=*vX(catmZy_m-KJ^4)*(IyOz#c-AulC zCs(IvJ?34q`__|>O`#MaX&w6_uk$~n{FHm z_%XlF8bUsa6*h#b-AQjS4J8z?aWskOE>eXzQ1Tx7e9wbE>o#RvpmgN&9F5z2F{ZHU zqZuMfC6R7*BX=pScbPKt67Q!#{k|NbrmUe!(>e#NXM5E*$+4Yk#SCc;6K0kPGV>Avsa#iWWVlre68+%OyJuFCK zL03YnqsJ&|O|sKmcX-_+JMhBvYQRneGyJQL$eC&14TBnoHpVsTgi?KdKa&|EXb1PF z4!l}gazseU*B!S<93x?zI5$Ts0}~s znM(ie_yF?(Q!@9)4vBsvVQtawm^fL@1W#yzCabfV1Ot29AsPGIXw4`}oyn_7;)fBa z%*6;A9ahIm*88Kefido^gUw-E6nyNvbgd2xHE*qDmXyuP3@65Mb2sBTsy?bMuZs<` zSFRKmbN<-q&6Cm@Rof+q?mK^W>l<$Vxkz@W{7U($Rn|GsgC^mCSjxOZL#0rwR+1@P z+*VD(QMDaPhOQ0n6mb=EU+6s9xealGWvzM~iGcX^qHXzWA@OlR3ktHYzKW5Bne1;8 z%J--f)ou?5?!2T~Md6K0kKs<~OYwBk*y`=H58wXc%(Cr-6%K>G@`&TTUH-z`a`UZV z!&NwTxN>rZu^x+#J}%1uIm|`c`p1uZUm_WbDpZGKrP8jlv}oOTl5&Rp^xCO8R}51x z6o5r+ws*!AwK}kw-$RjNi}g(0;WY&Ja`?Ptv*>f={@ScOZb;II&q_y{`VjTm{~CCD zlK%1*&(K`zI)`fNGvRdla+;~7%}1}qMju|rh7bCSuQ5rXYN60I=5fi2t0v}Flp8bB zRfK-*#uGJd$XoBAg9WAP=FzDf)2z>Ze*(oxLf-AZ?^AoTe`*~5@a2F66) zdcshrXu}U{?X|sN{WiL_k%L#7o0QeVhkrZ~lU=kfM*%gd1h8d;>!f_-WaDXpVK%bS zQrD!CBR_wZBLb=a-8Td$&qIoIlob&UuHJ6247ua%MoWPv;G>4L~=#bWbr|yvrC%Dd{rPh z3sMX`mAi&U1ZUeXE8TMLlgP;b|^_-|icHThLFiK-wvb_|awQx6j(ULX6&q;KR1IE^y`^FI5$43w91z4(TxAz_j3T|@TGAd+afwYO8c{(lDSY2%Yy8~F2qeg z4?n~$3!U6m(-7dJcj#+8U(3QAuJFWTKYp3qE|VgCFrO0hVE^adS@bH1%tgqX7l?TT za8Ugf^Zy~frg6d)s3FE zAt7HVx6f=WkL&+9=ecP~F%aWH7vU0dcxd}w_c5=Ea8pz8lsHM<;+q&@e&tfxr16}7 zNn)8S{{%Z^mnub1k7nrbM$hJqp6!Jlyz01jq2_A~p1FN=OrPw0OP-igv*WqR-RU4m z;sO~^|}es@%;0QN#!8q{rt5)9)00hDg_b7w<{sdcNG{`<1HR$y}IhP z=BWK;o{1}eYjODA#a6e3*^CGTxmBi+rc%^ug3y^9p7>Iun?XY4U{B}t6a{2|aE#;? zsabWMk}d>iUaEe8r{|lz&x~p!p{ss}g_&ci?POItsHKOVnU01PRjH7I;;qV+T@rN9 zPnkpD`9OWw0v!ML{QvMi|GVLpYM}KX0p0@Ca&~#0ZM!AsIaW&~fKq6v>S9&R#ZONw zc&iGZW3;Qf%y6SW$7))s-_| z|2hR6m7Y=DRcNF21|m@^B?7@yk##V=trDnWsp0Q-%~Bh-l0(#%*D0$IGXR^A|7IVR zE|I4wn-Tc)XMn8LDhWQ9`9hTK(dJ_;Y$hDw0S9;;jgkC1`TYeCMECoQ9P~}@==4_? z?8bFG5Kfx4r2Z{$5)c;d25@KonnU^1)J>z9$Rh(s2k~!H!0&=`LuCFk^{a@()E@9h z|9+98U0Me4bF>EC|6`IK&E({tCXe_0{vY#!mclL2rzP&uCz#{V#urA9-00BbqFHVPhRXe*w$2sm62Q%17}fe|yn za0CwYb=+1ug&~9~qoy7uec`_{mVvwegkZqoP)r&9Hj)B{K*d1$YY6MOuMn_P7{VAc zjIjg|z};yN9O%BH)jWkEf+<6S6UQEh4FsaS@h4;GG=?aqj6s9aMloOwSN^sR0gqGG zA%-bqrc&1waSH@0%KdX4FHd8LW6D4uygMHAfcaC_0gT~c_CJU|F>{>3wR{Q#n4iIn zfj)S0oT2#r6b3Mcf*AvS)A4ae>BcDxU<)#44D^k$#~Dsrr!atRvY0W@WwGOo0RpU( za|s}lVa7mLppG-T2~S}FdK6|1bZO)`BbE3R1|XDR#z5CNjx&TvPGJCw2WAX(t>8FA zlaG_;!OIPE(9Z`%K98NDVrZn=lyw`Ghyga-`y9la|tfq=(oPzxjQ HXaf2tRZAf3 literal 0 HcmV?d00001 From 6e3b49c522c6cfffe6b797b63315ea6391ba66e4 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 18 May 2016 12:04:46 -0400 Subject: [PATCH 17/36] Fix inequality symbol in test assertion This commit fixes the inequality symbol used in a test assertion in RepositoryS3SettingsTests#testInvalidChunkBufferSizeRepositorySettings. The inequality symbol was previously backwards but fixed in commit cad0608cdb28e2b8485e5c01c26579a35cb84356 but fixing the inequality symbol here was missed in that commit. 
Closes #18449 --- .../org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java index c5c617e8591..4cb8e4d3abb 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java @@ -316,7 +316,7 @@ public class RepositoryS3SettingsTests extends ESTestCase { "Failed to parse value [4mb] for setting [buffer_size] must be >= 5mb"); // chunk > 5tb should fail internalTestInvalidChunkBufferSizeSettings(new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(6, ByteSizeUnit.TB), - "Failed to parse value [6tb] for setting [chunk_size] must be =< 5tb"); + "Failed to parse value [6tb] for setting [chunk_size] must be <= 5tb"); } private Settings buildSettings(Settings... global) { From ee4e470f6000fda030ad1f6d9216efec70119227 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 18 May 2016 15:10:01 -0400 Subject: [PATCH 18/36] Add a wait_for_status=yellow to a docs snippet It was making tests unstable. --- docs/reference/query-dsl/parent-id-query.asciidoc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/parent-id-query.asciidoc b/docs/reference/query-dsl/parent-id-query.asciidoc index 713e19c26ce..a29073dafa9 100644 --- a/docs/reference/query-dsl/parent-id-query.asciidoc +++ b/docs/reference/query-dsl/parent-id-query.asciidoc @@ -28,6 +28,8 @@ PUT /my_index } } } + +GET /_cluster/health?wait_for_status=yellow ------------------------------------------ // CONSOLE // TESTSETUP @@ -73,7 +75,7 @@ This query has two required parameters: `id`:: The required parent id select documents must referrer to. -`ignore_unmapped`:: When set to `true` this will ignore an unmapped `type` and will not match any +`ignore_unmapped`:: When set to `true` this will ignore an unmapped `type` and will not match any documents for this query. This can be useful when querying multiple indexes which might have different mappings. When set to `false` (the default value) the query will throw an exception if the `type` is not mapped. From 9a9301f7d8acd94f621e4bf88522c6e58cf63af2 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 18 May 2016 23:00:57 +0200 Subject: [PATCH 19/36] Remove dead BloomFilter code We haven't used this class in quite a while. Let's trash it. --- .../common/util/BloomFilter.java | 629 ------------------ 1 file changed, 629 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/common/util/BloomFilter.java diff --git a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java b/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java deleted file mode 100644 index 6c471cddb55..00000000000 --- a/core/src/main/java/org/elasticsearch/common/util/BloomFilter.java +++ /dev/null @@ -1,629 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.util; - -import org.apache.lucene.store.DataInput; -import org.apache.lucene.store.DataOutput; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.hash.MurmurHash3; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.SizeValue; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; - -/** - * A bloom filter. Inspired by Guava bloom filter implementation though with some optimizations. - */ -public class BloomFilter { - - /** - * A factory that can use different fpp based on size. - */ - public static class Factory { - - public static final Factory DEFAULT = buildDefault(); - - private static Factory buildDefault() { - // Some numbers: - // 10k =0.001: 140.4kb , 10 Hashes - // 10k =0.01 : 93.6kb , 6 Hashes - // 100k=0.01 : 936.0kb , 6 Hashes - // 100k=0.03 : 712.7kb , 5 Hashes - // 500k=0.01 : 4.5mb , 6 Hashes - // 500k=0.03 : 3.4mb , 5 Hashes - // 500k=0.05 : 2.9mb , 4 Hashes - // 1m=0.01 : 9.1mb , 6 Hashes - // 1m=0.03 : 6.9mb , 5 Hashes - // 1m=0.05 : 5.9mb , 4 Hashes - // 5m=0.01 : 45.7mb , 6 Hashes - // 5m=0.03 : 34.8mb , 5 Hashes - // 5m=0.05 : 29.7mb , 4 Hashes - // 50m=0.01 : 457.0mb , 6 Hashes - // 50m=0.03 : 297.3mb , 4 Hashes - // 50m=0.10 : 228.5mb , 3 Hashes - return buildFromString("10k=0.01,1m=0.03"); - } - - /** - * Supports just passing fpp, as in "0.01", and also ranges, like "50k=0.01,1m=0.05". If - * its null, returns {@link #buildDefault()}. 
- */ - public static Factory buildFromString(@Nullable String config) { - if (config == null) { - return buildDefault(); - } - String[] sEntries = config.split(","); - if (sEntries.length == 0) { - if (config.length() > 0) { - return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))}); - } - return buildDefault(); - } - Entry[] entries = new Entry[sEntries.length]; - for (int i = 0; i < sEntries.length; i++) { - int index = sEntries[i].indexOf('='); - entries[i] = new Entry( - (int) SizeValue.parseSizeValue(sEntries[i].substring(0, index).trim()).singles(), - Double.parseDouble(sEntries[i].substring(index + 1).trim()) - ); - } - return new Factory(entries); - } - - private final Entry[] entries; - - public Factory(Entry[] entries) { - this.entries = entries; - // the order is from the upper most expected insertions to the lowest - Arrays.sort(this.entries, new Comparator() { - @Override - public int compare(Entry o1, Entry o2) { - return o2.expectedInsertions - o1.expectedInsertions; - } - }); - } - - public BloomFilter createFilter(int expectedInsertions) { - for (Entry entry : entries) { - if (expectedInsertions > entry.expectedInsertions) { - return BloomFilter.create(expectedInsertions, entry.fpp); - } - } - return BloomFilter.create(expectedInsertions, 0.03); - } - - public static class Entry { - public final int expectedInsertions; - public final double fpp; - - Entry(int expectedInsertions, double fpp) { - this.expectedInsertions = expectedInsertions; - this.fpp = fpp; - } - } - } - - /** - * Creates a bloom filter based on the with the expected number - * of insertions and expected false positive probability. - * - * @param expectedInsertions the number of expected insertions to the constructed - * @param fpp the desired false positive probability (must be positive and less than 1.0) - */ - public static BloomFilter create(int expectedInsertions, double fpp) { - return create(expectedInsertions, fpp, -1); - } - - /** - * Creates a bloom filter based on the expected number of insertions, expected false positive probability, - * and number of hash functions. - * - * @param expectedInsertions the number of expected insertions to the constructed - * @param fpp the desired false positive probability (must be positive and less than 1.0) - * @param numHashFunctions the number of hash functions to use (must be less than or equal to 255) - */ - public static BloomFilter create(int expectedInsertions, double fpp, int numHashFunctions) { - if (expectedInsertions == 0) { - expectedInsertions = 1; - } - /* - * TODO(user): Put a warning in the javadoc about tiny fpp values, - * since the resulting size is proportional to -log(p), but there is not - * much of a point after all, e.g. optimalM(1000, 0.0000000000000001) = 76680 - * which is less that 10kb. Who cares! 
- */ - long numBits = optimalNumOfBits(expectedInsertions, fpp); - - // calculate the optimal number of hash functions - if (numHashFunctions == -1) { - numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, numBits); - } - - try { - return new BloomFilter(new BitArray(numBits), numHashFunctions, Hashing.DEFAULT); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("Could not create BloomFilter of " + numBits + " bits", e); - } - } - - public static void skipBloom(IndexInput in) throws IOException { - int version = in.readInt(); // we do nothing with this now..., defaults to 0 - final int numLongs = in.readInt(); - in.seek(in.getFilePointer() + (numLongs * 8) + 4 + 4); // filter + numberOfHashFunctions + hashType - } - - public static BloomFilter deserialize(DataInput in) throws IOException { - int version = in.readInt(); // we do nothing with this now..., defaults to 0 - int numLongs = in.readInt(); - long[] data = new long[numLongs]; - for (int i = 0; i < numLongs; i++) { - data[i] = in.readLong(); - } - int numberOfHashFunctions = in.readInt(); - int hashType = in.readInt(); - return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType)); - } - - public static void serilaize(BloomFilter filter, DataOutput out) throws IOException { - out.writeInt(0); // version - BitArray bits = filter.bits; - out.writeInt(bits.data.length); - for (long l : bits.data) { - out.writeLong(l); - } - out.writeInt(filter.numHashFunctions); - out.writeInt(filter.hashing.type()); // hashType - } - - public static BloomFilter readFrom(StreamInput in) throws IOException { - int version = in.readVInt(); // we do nothing with this now..., defaults to 0 - int numLongs = in.readVInt(); - long[] data = new long[numLongs]; - for (int i = 0; i < numLongs; i++) { - data[i] = in.readLong(); - } - int numberOfHashFunctions = in.readVInt(); - int hashType = in.readVInt(); // again, nothing to do now... - return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType)); - } - - public static void writeTo(BloomFilter filter, StreamOutput out) throws IOException { - out.writeVInt(0); // version - BitArray bits = filter.bits; - out.writeVInt(bits.data.length); - for (long l : bits.data) { - out.writeLong(l); - } - out.writeVInt(filter.numHashFunctions); - out.writeVInt(filter.hashing.type()); // hashType - } - - /** - * The bit set of the BloomFilter (not necessarily power of 2!) - */ - final BitArray bits; - /** - * Number of hashes per element - */ - final int numHashFunctions; - - final Hashing hashing; - - BloomFilter(BitArray bits, int numHashFunctions, Hashing hashing) { - this.bits = bits; - this.numHashFunctions = numHashFunctions; - this.hashing = hashing; - /* - * This only exists to forbid BFs that cannot use the compact persistent representation. 
- * If it ever throws, at a user who was not intending to use that representation, we should - * reconsider - */ - if (numHashFunctions > 255) { - throw new IllegalArgumentException("Currently we don't allow BloomFilters that would use more than 255 hash functions"); - } - } - - public boolean put(BytesRef value) { - return hashing.put(value, numHashFunctions, bits); - } - - public boolean mightContain(BytesRef value) { - return hashing.mightContain(value, numHashFunctions, bits); - } - - public int getNumHashFunctions() { - return this.numHashFunctions; - } - - public long getSizeInBytes() { - return bits.ramBytesUsed(); - } - - @Override - public int hashCode() { - return bits.hashCode() + numHashFunctions; - } - - /* - * Cheat sheet: - * - * m: total bits - * n: expected insertions - * b: m/n, bits per insertion - - * p: expected false positive probability - * - * 1) Optimal k = b * ln2 - * 2) p = (1 - e ^ (-kn/m))^k - * 3) For optimal k: p = 2 ^ (-k) ~= 0.6185^b - * 4) For optimal k: m = -nlnp / ((ln2) ^ 2) - */ - - /** - * Computes the optimal k (number of hashes per element inserted in Bloom filter), given the - * expected insertions and total number of bits in the Bloom filter. - *

- * See http://en.wikipedia.org/wiki/File:Bloom_filter_fp_probability.svg for the formula. - * - * @param n expected insertions (must be positive) - * @param m total number of bits in Bloom filter (must be positive) - */ - static int optimalNumOfHashFunctions(long n, long m) { - return Math.max(1, (int) Math.round(m / n * Math.log(2))); - } - - /** - * Computes m (total bits of Bloom filter) which is expected to achieve, for the specified - * expected insertions, the required false positive probability. - *

- * See http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives for the formula. - * - * @param n expected insertions (must be positive) - * @param p false positive rate (must be 0 < p < 1) - */ - static long optimalNumOfBits(long n, double p) { - if (p == 0) { - p = Double.MIN_VALUE; - } - return (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2))); - } - - // Note: We use this instead of java.util.BitSet because we need access to the long[] data field - static final class BitArray { - final long[] data; - final long bitSize; - long bitCount; - - BitArray(long bits) { - this(new long[size(bits)]); - } - - private static int size(long bits) { - long quotient = bits / 64; - long remainder = bits - quotient * 64; - return Math.toIntExact(remainder == 0 ? quotient : 1 + quotient); - } - - // Used by serialization - BitArray(long[] data) { - this.data = data; - long bitCount = 0; - for (long value : data) { - bitCount += Long.bitCount(value); - } - this.bitCount = bitCount; - this.bitSize = data.length * Long.SIZE; - } - - /** Returns true if the bit changed value. */ - boolean set(long index) { - if (!get(index)) { - data[(int) (index >>> 6)] |= (1L << index); - bitCount++; - return true; - } - return false; - } - - boolean get(long index) { - return (data[(int) (index >>> 6)] & (1L << index)) != 0; - } - - /** Number of bits */ - long bitSize() { - return bitSize; - } - - /** Number of set bits (1s) */ - long bitCount() { - return bitCount; - } - - BitArray copy() { - return new BitArray(data.clone()); - } - - /** Combines the two BitArrays using bitwise OR. */ - void putAll(BitArray array) { - bitCount = 0; - for (int i = 0; i < data.length; i++) { - data[i] |= array.data[i]; - bitCount += Long.bitCount(data[i]); - } - } - - @Override public boolean equals(Object o) { - if (o instanceof BitArray) { - BitArray bitArray = (BitArray) o; - return Arrays.equals(data, bitArray.data); - } - return false; - } - - @Override public int hashCode() { - return Arrays.hashCode(data); - } - - public long ramBytesUsed() { - return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16; - } - } - - static enum Hashing { - - V0() { - @Override - protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0); - int hash1 = (int) hash64; - int hash2 = (int) (hash64 >>> 32); - boolean bitsChanged = false; - for (int i = 1; i <= numHashFunctions; i++) { - int nextHash = hash1 + i * hash2; - if (nextHash < 0) { - nextHash = ~nextHash; - } - bitsChanged |= bits.set(nextHash % bitSize); - } - return bitsChanged; - } - - @Override - protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0); - int hash1 = (int) hash64; - int hash2 = (int) (hash64 >>> 32); - for (int i = 1; i <= numHashFunctions; i++) { - int nextHash = hash1 + i * hash2; - if (nextHash < 0) { - nextHash = ~nextHash; - } - if (!bits.get(nextHash % bitSize)) { - return false; - } - } - return true; - } - - @Override - protected int type() { - return 0; - } - }, - V1() { - @Override - protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128()); - - boolean bitsChanged = false; - long 
combinedHash = hash128.h1; - for (int i = 0; i < numHashFunctions; i++) { - // Make the combined hash positive and indexable - bitsChanged |= bits.set((combinedHash & Long.MAX_VALUE) % bitSize); - combinedHash += hash128.h2; - } - return bitsChanged; - } - - @Override - protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) { - long bitSize = bits.bitSize(); - MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128()); - - long combinedHash = hash128.h1; - for (int i = 0; i < numHashFunctions; i++) { - // Make the combined hash positive and indexable - if (!bits.get((combinedHash & Long.MAX_VALUE) % bitSize)) { - return false; - } - combinedHash += hash128.h2; - } - return true; - } - - @Override - protected int type() { - return 1; - } - } - ; - - protected abstract boolean put(BytesRef value, int numHashFunctions, BitArray bits); - - protected abstract boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits); - - protected abstract int type(); - - public static final Hashing DEFAULT = Hashing.V1; - - public static Hashing fromType(int type) { - if (type == 0) { - return Hashing.V0; - } if (type == 1) { - return Hashing.V1; - } else { - throw new IllegalArgumentException("no hashing type matching " + type); - } - } - } - - // START : MURMUR 3_128 USED FOR Hashing.V0 - // NOTE: don't replace this code with the o.e.common.hashing.MurmurHash3 method which returns a different hash - - protected static long getblock(byte[] key, int offset, int index) { - int i_8 = index << 3; - int blockOffset = offset + i_8; - return ((long) key[blockOffset + 0] & 0xff) + (((long) key[blockOffset + 1] & 0xff) << 8) + - (((long) key[blockOffset + 2] & 0xff) << 16) + (((long) key[blockOffset + 3] & 0xff) << 24) + - (((long) key[blockOffset + 4] & 0xff) << 32) + (((long) key[blockOffset + 5] & 0xff) << 40) + - (((long) key[blockOffset + 6] & 0xff) << 48) + (((long) key[blockOffset + 7] & 0xff) << 56); - } - - protected static long rotl64(long v, int n) { - return ((v << n) | (v >>> (64 - n))); - } - - protected static long fmix(long k) { - k ^= k >>> 33; - k *= 0xff51afd7ed558ccdL; - k ^= k >>> 33; - k *= 0xc4ceb9fe1a85ec53L; - k ^= k >>> 33; - - return k; - } - - @SuppressWarnings("fallthrough") // Uses fallthrough to implement a well know hashing algorithm - public static long hash3_x64_128(byte[] key, int offset, int length, long seed) { - final int nblocks = length >> 4; // Process as 128-bit blocks. - - long h1 = seed; - long h2 = seed; - - long c1 = 0x87c37b91114253d5L; - long c2 = 0x4cf5ad432745937fL; - - //---------- - // body - - for (int i = 0; i < nblocks; i++) { - long k1 = getblock(key, offset, i * 2 + 0); - long k2 = getblock(key, offset, i * 2 + 1); - - k1 *= c1; - k1 = rotl64(k1, 31); - k1 *= c2; - h1 ^= k1; - - h1 = rotl64(h1, 27); - h1 += h2; - h1 = h1 * 5 + 0x52dce729; - - k2 *= c2; - k2 = rotl64(k2, 33); - k2 *= c1; - h2 ^= k2; - - h2 = rotl64(h2, 31); - h2 += h1; - h2 = h2 * 5 + 0x38495ab5; - } - - //---------- - // tail - - // Advance offset to the unprocessed tail of the data. 
- offset += nblocks * 16; - - long k1 = 0; - long k2 = 0; - - switch (length & 15) { - case 15: - k2 ^= ((long) key[offset + 14]) << 48; - case 14: - k2 ^= ((long) key[offset + 13]) << 40; - case 13: - k2 ^= ((long) key[offset + 12]) << 32; - case 12: - k2 ^= ((long) key[offset + 11]) << 24; - case 11: - k2 ^= ((long) key[offset + 10]) << 16; - case 10: - k2 ^= ((long) key[offset + 9]) << 8; - case 9: - k2 ^= ((long) key[offset + 8]) << 0; - k2 *= c2; - k2 = rotl64(k2, 33); - k2 *= c1; - h2 ^= k2; - - case 8: - k1 ^= ((long) key[offset + 7]) << 56; - case 7: - k1 ^= ((long) key[offset + 6]) << 48; - case 6: - k1 ^= ((long) key[offset + 5]) << 40; - case 5: - k1 ^= ((long) key[offset + 4]) << 32; - case 4: - k1 ^= ((long) key[offset + 3]) << 24; - case 3: - k1 ^= ((long) key[offset + 2]) << 16; - case 2: - k1 ^= ((long) key[offset + 1]) << 8; - case 1: - k1 ^= (key[offset]); - k1 *= c1; - k1 = rotl64(k1, 31); - k1 *= c2; - h1 ^= k1; - } - - //---------- - // finalization - - h1 ^= length; - h2 ^= length; - - h1 += h2; - h2 += h1; - - h1 = fmix(h1); - h2 = fmix(h2); - - h1 += h2; - h2 += h1; - - //return (new long[]{h1, h2}); - // SAME AS GUAVA, they take the first long out of the 128bit - return h1; - } - - // END: MURMUR 3_128 -} From 27e6908c8d9b0f69d219c54a9162f1da4b036211 Mon Sep 17 00:00:00 2001 From: Isabel Drost-Fromm Date: Thu, 19 May 2016 09:33:29 +0200 Subject: [PATCH 20/36] Add indent --- .../search/request/named-queries-and-filters.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/search/request/named-queries-and-filters.asciidoc b/docs/reference/search/request/named-queries-and-filters.asciidoc index f8be0f1be69..0fb60253938 100644 --- a/docs/reference/search/request/named-queries-and-filters.asciidoc +++ b/docs/reference/search/request/named-queries-and-filters.asciidoc @@ -10,8 +10,8 @@ GET /_search "query": { "bool" : { "should" : [ - {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }}, - {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }} + {"match" : { "name.first" : {"query" : "shay", "_name" : "first"} }}, + {"match" : { "name.last" : {"query" : "banon", "_name" : "last"} }} ], "filter" : { "terms" : { From 2b972f1f75c03eaeca28abbaf912170b30cec732 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 19 May 2016 09:40:10 +0200 Subject: [PATCH 21/36] FSync translog outside of the writers global lock (#18360) FSync translog outside of the writers global lock Today we acquire a write global lock that blocks all modification to the translog file while we fsync / checkpoint the file. Yet, we don't necessarily need to block concurrent operations here. This can lead to a lot of blocked threads if the machine has high concurrency (lots of CPUs) but uses slow disks (spinning disks) which is absolutely unnecessary. We just need to protect from fsyncing / checkpointing concurrently but we can fill buffers and write to the underlying file in a concurrent fashion. This change introduces an additional lock that we hold while fsyncing but moves the checkpointing code outside of the writers global lock.
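
The scheme described above is essentially double-checked locking around the fsync: check cheaply whether a sync is needed, take a dedicated sync lock, re-check, copy the current offset while briefly holding the writer lock, then fsync and checkpoint outside of it so other threads can keep appending. A minimal sketch of only that locking shape follows; the class and member names (SyncedWriter, lastSynced, the buffer flush) are illustrative placeholders, not the actual TranslogWriter members shown in the diff below.

    // Minimal sketch of the double-checked locking scheme described above; names are illustrative.
    class SyncedWriter {
        private final Object syncLock = new Object(); // serializes fsync/checkpoint only
        private volatile long totalOffset;            // bytes handed to the in-memory buffer
        private volatile long lastSynced;             // bytes known to be fsynced

        void append(byte[] data) {
            synchronized (this) {                     // writers still use the instance lock
                // ... buffer the bytes ...
                totalOffset += data.length;
            }
        }

        boolean syncUpTo(long offset) throws java.io.IOException {
            if (lastSynced < offset) {                // cheap unsynchronized check
                synchronized (syncLock) {             // at most one fsync/checkpoint at a time
                    if (lastSynced < offset) {        // re-check now that we hold the sync lock
                        final long offsetToSync;
                        synchronized (this) {         // briefly block writers to flush the buffer
                            // ... flush the buffer to the channel ...
                            offsetToSync = totalOffset;
                        }
                        // fsync + checkpoint would happen here, outside of 'this',
                        // so concurrent writers can keep filling the buffer
                        lastSynced = offsetToSync;    // write is protected by syncLock
                        return true;
                    }
                }
            }
            return false;
        }
    }
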
--- .../index/translog/TranslogWriter.java | 64 +++++++++++-------- 1 file changed, 37 insertions(+), 27 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index e215669761c..84278fa92b3 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -39,6 +39,7 @@ import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.ReentrantLock; public class TranslogWriter extends BaseTranslogReader implements Closeable { @@ -60,7 +61,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { private volatile long totalOffset; protected final AtomicBoolean closed = new AtomicBoolean(false); - + // lock order synchronized(syncLock) -> synchronized(this) + private final Object syncLock = new Object(); public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException { super(generation, channel, path, channel.position()); @@ -146,23 +148,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { * raising the exception. */ public void sync() throws IOException { - if (syncNeeded()) { - synchronized (this) { - ensureOpen(); - final long offsetToSync; - final int opsCounter; - try { - outputStream.flush(); - offsetToSync = totalOffset; - opsCounter = operationCounter; - checkpoint(offsetToSync, opsCounter, generation, channel, path); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - lastSyncedOffset = offsetToSync; - } - } + syncUpTo(Long.MAX_VALUE); } /** @@ -229,9 +215,38 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { * @return true if this call caused an actual sync operation */ public boolean syncUpTo(long offset) throws IOException { - if (lastSyncedOffset < offset) { - sync(); - return true; + if (lastSyncedOffset < offset && syncNeeded()) { + synchronized (syncLock) { // only one sync/checkpoint should happen concurrently but we wait + if (lastSyncedOffset < offset && syncNeeded()) { + // double checked locking - we don't want to fsync unless we have to and now that we have + // the lock we should check again since if this code is busy we might have fsynced enough already + final long offsetToSync; + final int opsCounter; + synchronized (this) { + ensureOpen(); + try { + outputStream.flush(); + offsetToSync = totalOffset; + opsCounter = operationCounter; + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + } + // now do the actual fsync outside of the synchronized block such that + // we can continue writing to the buffer etc. 
+ try { + channel.force(false); + writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; + } + assert lastSyncedOffset <= offsetToSync : "illegal state: " + lastSyncedOffset + " <= " + offsetToSync; + lastSyncedOffset = offsetToSync; // write protected by syncLock + return true; + } + } } return false; } @@ -254,11 +269,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } - private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException { - translogFileChannel.force(false); - writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE); - } - private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException { final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME); Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation); @@ -269,7 +279,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { static final ChannelFactory DEFAULT = new ChannelFactory(); - // only for testing until we have a disk-full FileSystemt + // only for testing until we have a disk-full FileSystem public FileChannel open(Path file) throws IOException { return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW); } From d77c299cb9b2a55b47fff539b539ebb9846bd858 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 19 May 2016 10:42:35 +0200 Subject: [PATCH 22/36] Register `indices.query.bool.max_clause_count` setting (#18341) * Register `indices.query.bool.max_clause_count` setting This commit registers `indices.query.bool.max_clause_count` as a node level setting and removes support for its synonym setting `index.query.bool.max_clause_count`. 
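
In practice the clause limit is now resolved once from node-level settings (for example from elasticsearch.yml) instead of per index. The sketch below shows how the registered setting resolves a configured value, mirroring the SearchModule change in the diff that follows; the 4096 is only an example value, the default remains 1024.

    // Sketch only: resolving the new node-scope clause limit the way the diff below does.
    import org.apache.lucene.search.BooleanQuery;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    class MaxClauseCountSketch {
        static final Setting<Integer> MAX_CLAUSE_COUNT = Setting.intSetting(
                "indices.query.bool.max_clause_count", 1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope);

        static void apply(Settings nodeSettings) {
            // e.g. nodeSettings built from elasticsearch.yml containing indices.query.bool.max_clause_count: 4096
            BooleanQuery.setMaxClauseCount(MAX_CLAUSE_COUNT.get(nodeSettings));
        }
    }
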
Closes #18336 --- .../elasticsearch/common/settings/ClusterSettings.java | 4 +++- .../elasticsearch/common/settings/SettingsModule.java | 7 ++++++- .../main/java/org/elasticsearch/search/SearchModule.java | 6 ++++-- .../common/settings/SettingsModuleTests.java | 9 +++++++++ docs/reference/migration/migrate_5_0/settings.asciidoc | 6 ++++++ 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e66534a4feb..5b6130281d4 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -87,6 +87,7 @@ import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -420,6 +421,7 @@ public final class ClusterSettings extends AbstractScopedSettings { ResourceWatcherService.ENABLED, ResourceWatcherService.RELOAD_INTERVAL_HIGH, ResourceWatcherService.RELOAD_INTERVAL_MEDIUM, - ResourceWatcherService.RELOAD_INTERVAL_LOW + ResourceWatcherService.RELOAD_INTERVAL_LOW, + SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING ))); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 2e7acd6ae8c..2ed5ffd86cd 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -65,7 +65,12 @@ public class SettingsModule extends AbstractModule { protected void configure() { final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values())); - Settings indexSettings = settings.filter((s) -> s.startsWith("index.") && clusterSettings.get(s) == null); + Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") && + // special case - we want to get Did you mean indices.query.bool.max_clause_count + // which means we need to by-pass this check for this setting + // TODO remove in 6.0!! 
+ "index.query.bool.max_clause_count".equals(s) == false) + && clusterSettings.get(s) == null); if (indexSettings.isEmpty() == false) { try { String separator = IntStream.range(0, 85).mapToObj(s -> "*").collect(Collectors.joining("")).trim(); diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 513a5c88a37..59e9c94c27d 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.search.function.ScoreFunction; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase; @@ -290,6 +291,8 @@ public class SearchModule extends AbstractModule { private final Settings settings; private final NamedWriteableRegistry namedWriteableRegistry; + public static final Setting INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count", + 1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope); // pkg private so tests can mock Class searchServiceImpl = SearchService.class; @@ -650,8 +653,7 @@ public class SearchModule extends AbstractModule { registerQuery(MatchAllQueryBuilder::new, MatchAllQueryBuilder::fromXContent, MatchAllQueryBuilder.QUERY_NAME_FIELD); registerQuery(QueryStringQueryBuilder::new, QueryStringQueryBuilder::fromXContent, QueryStringQueryBuilder.QUERY_NAME_FIELD); registerQuery(BoostingQueryBuilder::new, BoostingQueryBuilder::fromXContent, BoostingQueryBuilder.QUERY_NAME_FIELD); - BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count", - settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount()))); + BooleanQuery.setMaxClauseCount(INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings)); registerQuery(BoolQueryBuilder::new, BoolQueryBuilder::fromXContent, BoolQueryBuilder.QUERY_NAME_FIELD); registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD); registerQuery(TermsQueryBuilder::new, TermsQueryBuilder::fromXContent, TermsQueryBuilder.QUERY_NAME_FIELD); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index 977d5fa7b09..5e992fc947c 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -208,4 +208,13 @@ public class SettingsModuleTests extends ModuleTestCase { assertThat(e.getMessage(), containsString("Cannot register setting [foo.bar] twice")); } } + + public void testOldMaxClauseCountSetting() { + Settings settings = Settings.builder().put("index.query.bool.max_clause_count", 1024).build(); + SettingsModule module = new SettingsModule(settings); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> assertInstanceBinding(module, Settings.class, (s) -> s == settings)); + assertEquals("unknown setting [index.query.bool.max_clause_count] did you mean [indices.query.bool.max_clause_count]?", + ex.getMessage()); + } } 
diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index e916ec6b24c..85895d65b67 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -258,3 +258,9 @@ Previously script mode settings (e.g., "script.inline: true", Prior to 5.0 a third option could be specified for the `script.inline` and `script.stored` settings ("sandbox"). This has been removed, You can now only set `script.line: true` or `script.stored: true`. + +==== Search settings + +The setting `index.query.bool.max_clause_count` has been removed. In order to +set the maximum number of boolean clauses `indices.query.bool.max_clause_count` +should be used instead. From 050145f61b3bf6666712059bea07b54ca1076ed6 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 18 May 2016 16:13:52 +0200 Subject: [PATCH 23/36] parent/child: Allow adding additional child types that point to an existing parent type From 2.0 adding child types to existing types was forbidden because the `_parent` field stores the join between parent and child at index time. This is to protect against a type that wasn't a parent before becoming a parent while previously indexed documents would not have a join field. This would break the parent/child queries. The restriction was a bit too strict in the sense that it would also forbid adding child types that point to a type that is already a parent type (so child types already point to it). This change makes sure that the restriction only applies if that type isn't a parent type already. Closes #17956 --- .../metadata/MetaDataMappingService.java | 7 +- .../metadata/MetaDataMappingServiceTests.java | 66 +++++++++++++++++++ .../search/child/ChildQuerySearchIT.java | 23 ------- 3 files changed, 71 insertions(+), 25 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 311ffae71fd..d3b5e7ecbad 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -281,8 +281,11 @@ public class MetaDataMappingService extends AbstractComponent { // Also the order of the mappings may be backwards.
if (newMapper.parentFieldMapper().active()) { for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) { - throw new IllegalArgumentException("can't add a _parent field that points to an already existing type"); + String parentType = newMapper.parentFieldMapper().type(); + if (parentType.equals(mapping.value.type()) && + indexService.mapperService().getParentTypes().contains(parentType) == false) { + throw new IllegalArgumentException("can't add a _parent field that points to an " + + "already existing type, that isn't already a parent"); } } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java new file mode 100644 index 00000000000..c6ce30e2a52 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataMappingServiceTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.cluster.metadata;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
+
+    // Tests _parent meta field logic, because part of the validation is in MetaDataMappingService
+    public void testAddChildTypePointingToAlreadyExistingType() throws Exception {
+        createIndex("test", Settings.EMPTY, "type", "field", "type=keyword");
+
+        // Shouldn't be able to add the _parent field pointing to an already existing type, which isn't a parent type
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin()
+            .indices()
+            .preparePutMapping("test")
+            .setType("child")
+            .setSource("_parent", "type=type")
+            .get());
+        assertThat(e.getMessage(),
+            equalTo("can't add a _parent field that points to an already existing type, that isn't already a parent"));
+    }
+
+    // Tests _parent meta field logic, because part of the validation is in MetaDataMappingService
+    public void testAddExtraChildTypePointingToAlreadyParentExistingType() throws Exception {
+        IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test")
+                .addMapping("parent")
+                .addMapping("child1", "_parent", "type=parent")
+        );
+
+        // adding the extra child type that points to an already existing parent type is allowed:
+        client().admin()
+            .indices()
+            .preparePutMapping("test")
+            .setType("child2")
+            .setSource("_parent", "type=parent")
+            .get();
+
+        DocumentMapper documentMapper = indexService.mapperService().documentMapper("child2");
+        assertThat(documentMapper.parentFieldMapper().type(), equalTo("parent"));
+        assertThat(documentMapper.parentFieldMapper().active(), is(true));
+    }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
index 1a258abc9cc..90f5c65c066 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
@@ -1321,29 +1321,6 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
         }
     }
 
-    public void testAddParentFieldAfterIndexingParentDocButBeforeIndexingChildDoc() throws Exception {
-        assertAcked(prepareCreate("test")
-                .setSettings(Settings.builder()
-                        .put(indexSettings())
-                        .put("index.refresh_interval", -1)));
-        ensureGreen();
-
-        String parentId = "p1";
-        client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get();
-        refresh();
-
-        try {
-            assertAcked(client().admin()
-                    .indices()
-                    .preparePutMapping("test")
-                    .setType("child")
-                    .setSource("_parent", "type=parent"));
-            fail("Shouldn't be able the add the _parent field pointing to an already existing parent type");
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), equalTo("can't add a _parent field that points to an already existing type"));
-        }
-    }
-
     public void testParentChildCaching() throws Exception {
         assertAcked(prepareCreate("test")
                 .setSettings(

From e2691d7e5ca6006a93ae421bf28183c21047910b Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Thu, 19 May 2016 13:08:30 +0200
Subject: [PATCH 24/36] test: Don't generate a value of 0, because FuzzyQuery
 constructor doesn't allow that
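[Editorial note, not part of the original commit message: the change below adjusts a
randomly generated test value so that it can no longer be 0. As an illustration only,
and assuming the rejected parameter is FuzzyQuery's maxExpansions (the exact hunk is
not fully legible here), the following self-contained snippet shows why 0 is invalid
and why randomised values for it should start at 1.]

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.FuzzyQuery;

    public class FuzzyQueryZeroExample {
        public static void main(String[] args) {
            Term term = new Term("field", "valeu");
            // maxEdits=1, prefixLength=0 and transpositions=true are all valid;
            // maxExpansions must be >= 1, so this constructor call succeeds:
            System.out.println(new FuzzyQuery(term, 1, 0, 1, true));
            try {
                // a maxExpansions of 0 is rejected by the constructor:
                new FuzzyQuery(term, 1, 0, 0, true);
            } catch (IllegalArgumentException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }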
--- .../org/elasticsearch/index/query/MatchQueryBuilderTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 6cec3ae8c52..87f8f3981d3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -89,7 +89,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase Date: Tue, 15 Sep 2015 17:35:10 +0200 Subject: [PATCH 25/36] Add Google Cloud Storage repository plugin Closes #12880 --- .../plugins/InstallPluginCommand.java | 1 + .../blobstore/FsBlobStoreContainerTests.java | 2 +- .../common/blobstore/FsBlobStoreTests.java | 2 +- .../snapshots/FsBlobStoreRepositoryIT.java | 2 +- dev-tools/smoke_test_rc.py | 1 + docs/plugins/repository-gcs.asciidoc | 216 +++++++ docs/plugins/repository.asciidoc | 5 + docs/reference/modules/snapshots.asciidoc | 1 + plugins/repository-gcs/build.gradle | 54 ++ .../licenses/commons-codec-1.10.jar.sha1 | 1 + .../licenses/commons-codec-LICENSE.txt | 202 +++++++ .../licenses/commons-codec-NOTICE.txt | 17 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 + .../licenses/commons-logging-LICENSE.txt | 202 +++++++ .../licenses/commons-logging-NOTICE.txt | 5 + .../licenses/google-LICENSE.txt | 201 +++++++ .../repository-gcs/licenses/google-NOTICE.txt | 1 + .../google-api-client-1.21.0.jar.sha1 | 1 + ...-services-storage-v1-rev66-1.21.0.jar.sha1 | 1 + .../google-http-client-1.21.0.jar.sha1 | 1 + ...oogle-http-client-jackson2-1.21.0.jar.sha1 | 1 + .../google-oauth-client-1.21.0.jar.sha1 | 1 + .../licenses/httpclient-4.3.6.jar.sha1 | 1 + .../licenses/httpclient-LICENSE.txt | 558 ++++++++++++++++++ .../licenses/httpclient-NOTICE.txt | 5 + .../licenses/httpcore-4.3.3.jar.sha1 | 1 + .../licenses/httpcore-LICENSE.txt | 241 ++++++++ .../licenses/httpcore-NOTICE.txt | 8 + .../gcs/GoogleCloudStorageBlobContainer.java | 120 ++++ .../gcs/GoogleCloudStorageBlobStore.java | 432 ++++++++++++++ .../gcs/GoogleCloudStorageModule.java | 31 + .../gcs/GoogleCloudStoragePlugin.java | 131 ++++ .../gcs/GoogleCloudStorageRepository.java | 154 +++++ .../gcs/GoogleCloudStorageService.java | 176 ++++++ .../plugin-metadata/plugin-security.policy | 26 + ...leCloudStorageBlobStoreContainerTests.java | 36 ++ .../gcs/GoogleCloudStorageBlobStoreTests.java | 36 ++ .../blobstore/gcs/MockHttpTransport.java | 432 ++++++++++++++ ...eCloudStorageBlobStoreRepositoryTests.java | 96 +++ .../GoogleCloudStorageRepositoryRestIT.java | 41 ++ .../test/repository_gcs/10_basic.yaml | 13 + .../scripts/module_and_plugin_test_cases.bash | 8 + settings.gradle | 1 + .../ESBlobStoreContainerTestCase.java | 9 +- .../ESBlobStoreRepositoryIntegTestCase.java | 21 +- .../repositories}/ESBlobStoreTestCase.java | 3 +- 46 files changed, 3487 insertions(+), 12 deletions(-) create mode 100644 docs/plugins/repository-gcs.asciidoc create mode 100644 plugins/repository-gcs/build.gradle create mode 100644 plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/commons-codec-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/commons-codec-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/commons-logging-LICENSE.txt create mode 100644 
plugins/repository-gcs/licenses/commons-logging-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/httpclient-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/httpclient-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/httpcore-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/httpcore-NOTICE.txt create mode 100644 plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java create mode 100644 plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java create mode 100644 plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java create mode 100644 plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java create mode 100644 plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java create mode 100644 plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java create mode 100644 plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java create mode 100644 plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java create mode 100644 plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml rename {core/src/test/java/org/elasticsearch/test => test/framework/src/main/java/org/elasticsearch/repositories}/ESBlobStoreContainerTestCase.java (94%) rename {core/src/test/java/org/elasticsearch/test => test/framework/src/main/java/org/elasticsearch/repositories}/ESBlobStoreRepositoryIntegTestCase.java (91%) rename {core/src/test/java/org/elasticsearch/test => test/framework/src/main/java/org/elasticsearch/repositories}/ESBlobStoreTestCase.java (97%) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 2e5faa6bfae..e81d376f3b5 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -126,6 +126,7 @@ class 
InstallPluginCommand extends Command { "mapper-murmur3", "mapper-size", "repository-azure", + "repository-gcs", "repository-hdfs", "repository-s3", "store-smb", diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java index 63c04b1c5e1..b08b81db11a 100644 --- a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java +++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.test.ESBlobStoreContainerTestCase; +import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; import java.io.IOException; import java.nio.file.Path; diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java index f6f53549ce4..7d4ac1acc07 100644 --- a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java +++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.test.ESBlobStoreTestCase; +import org.elasticsearch.repositories.ESBlobStoreTestCase; import java.io.IOException; import java.nio.file.Path; diff --git a/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java index fceedff8e70..84c3a03f2c8 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java @@ -20,7 +20,7 @@ package org.elasticsearch.snapshots; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.test.ESBlobStoreRepositoryIntegTestCase; +import org.elasticsearch.repositories.ESBlobStoreRepositoryIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 244ab8caa25..39db9929a54 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -75,6 +75,7 @@ DEFAULT_PLUGINS = ["analysis-icu", "mapper-murmur3", "mapper-size", "repository-azure", + "repository-gcs", "repository-hdfs", "repository-s3", "store-smb"] diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc new file mode 100644 index 00000000000..bed78b4cbbf --- /dev/null +++ b/docs/plugins/repository-gcs.asciidoc @@ -0,0 +1,216 @@ +[[repository-gcs]] +=== Google Cloud Storage Repository Plugin + +The GCS repository plugin adds support for using the https://cloud.google.com/storage/[Google Cloud Storage] +service as a repository for {ref}/modules-snapshots.html[Snapshot/Restore]. 
+
+[[repository-gcs-install]]
+[float]
+==== Installation
+
+This plugin can be installed using the plugin manager:
+
+[source,sh]
+----------------------------------------------------------------
+sudo bin/elasticsearch-plugin install repository-gcs
+----------------------------------------------------------------
+
+NOTE: The plugin requires additional security permissions in order to work.
+
+The plugin must be installed on every node in the cluster, and each node must
+be restarted after installation.
+
+[[repository-gcs-remove]]
+[float]
+==== Removal
+
+The plugin can be removed with the following command:
+
+[source,sh]
+----------------------------------------------------------------
+sudo bin/elasticsearch-plugin remove repository-gcs
+----------------------------------------------------------------
+
+The node must be stopped before removing the plugin.
+
+[[repository-gcs-usage]]
+==== Getting started
+
+The plugin uses the https://cloud.google.com/storage/docs/json_api/[Google Cloud Storage JSON API] (v1)
+to connect to the Storage service. If this is your first time using Google Cloud Storage, you first
+need to connect to the https://console.cloud.google.com/[Google Cloud Platform Console] and create a new
+project. Once your project is created, you must enable the Cloud Storage Service for your project.
+
+[[repository-gcs-creating-bucket]]
+===== Creating a Bucket
+
+The Google Cloud Storage service uses the concept of a https://cloud.google.com/storage/docs/key-terms[Bucket]
+as a container for all the data. Buckets are usually created using the
+https://console.cloud.google.com/[Google Cloud Platform Console]. The plugin will not automatically
+create buckets.
+
+To create a new bucket:
+
+1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
+2. Select your project
+3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser]
+4. Click the "Create Bucket" button
+5. Enter the name of the new bucket
+6. Select a storage class
+7. Select a location
+8. Click the "Create" button
+
+The bucket should now be created.
+
+[[repository-gcs-service-authentication]]
+===== Service Authentication
+
+The plugin supports two authentication modes:
+
+* the built-in <<repository-gcs-using-compute-engine,Compute Engine authentication>>. This mode is
+recommended if your elasticsearch node is running on a Compute Engine virtual machine.
+
+* the <<repository-gcs-using-service-account,Service Account>> authentication mode.
+
+[[repository-gcs-using-compute-engine]]
+===== Using Compute Engine
+
+When running on Compute Engine, the plugin uses Google's built-in authentication mechanism to
+authenticate against the Storage service. Compute Engine virtual machines are usually associated
+with a default service account. This service account can be found in the VM instance details in the
+https://console.cloud.google.com/compute/[Compute Engine console].
+
+To indicate that a repository should use the built-in authentication,
+the repository `service_account` setting must be set to `_default_`:
+
+[source,json]
+----
+PUT _snapshot/my_gcs_repository_on_compute_engine
+{
+  "type": "gcs",
+  "settings": {
+    "bucket": "my_bucket",
+    "service_account": "_default_"
+  }
+}
+----
+// CONSOLE
+
+NOTE: The Compute Engine VM must be allowed to use the Storage service. This can be done only at VM
+creation time, when "Storage" access can be configured with "Read/Write" permission. Check your
+instance details in the "Cloud API access scopes" section.
+
+[[repository-gcs-using-service-account]]
+===== Using a Service Account
+
+If your elasticsearch node is not running on Compute Engine, or if you don't want to use Google's
+built-in authentication mechanism, you can authenticate against the Storage service using a
+https://cloud.google.com/iam/docs/overview#service_account[Service Account] file.
+
+To create a service account file:
+
+1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
+2. Select your project
+3. Go to the https://console.cloud.google.com/permissions[Permission] tab
+4. Select the https://console.cloud.google.com/permissions/serviceaccounts[Service Accounts] tab
+5. Click on "Create service account"
+6. Once created, select the new service account and download a JSON key file
+
+A service account file looks like this:
+
+[source,json]
+----
+{
+  "type": "service_account",
+  "project_id": "your-project-id",
+  "private_key_id": "...",
+  "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
+  "client_email": "service-account-for-your-repository@your-project-id.iam.gserviceaccount.com",
+  "client_id": "...",
+  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+  "token_uri": "https://accounts.google.com/o/oauth2/token",
+  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+  "client_x509_cert_url": "..."
+}
+----
+
+This file must be copied into the `config` directory of the elasticsearch installation on
+every node of the cluster.
+
+To indicate that a repository should use a service account file:
+
+[source,json]
+----
+PUT _snapshot/my_gcs_repository
+{
+  "type": "gcs",
+  "settings": {
+    "bucket": "my_bucket",
+    "service_account": "service_account.json"
+  }
+}
+----
+// CONSOLE
+
+
+[[repository-gcs-bucket-permission]]
+===== Set Bucket Permission
+
+The service account used to access the bucket must have "Writer" access to the bucket:
+
+1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console]
+2. Select your project
+3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser]
+4. Select the bucket and "Edit bucket permission"
+5. The service account must be configured as a "User" with "Writer" access
+
+
+[[repository-gcs-repository]]
+==== Create a Repository
+
+Once everything is installed and every node is started, you can create a new repository that
+uses Google Cloud Storage to store snapshots:
+
+[source,json]
+----
+PUT _snapshot/my_gcs_repository
+{
+  "type": "gcs",
+  "settings": {
+    "bucket": "my_bucket",
+    "service_account": "service_account.json"
+  }
+}
+----
+// CONSOLE
+
+The following settings are supported:
+
+`bucket`::
+
+    The name of the bucket to be used for snapshots. (Mandatory)
+
+`service_account`::
+
+    The service account to use. It can be a relative path to a service account JSON file
+    or the value `_default_`, which indicates that the built-in Compute Engine service
+    account should be used.
+
+`base_path`::
+
+    Specifies the path within the bucket to the repository data. Defaults to
+    the root of the bucket.
+
+`chunk_size`::
+
+    Big files can be broken down into chunks during snapshotting if needed.
+    The chunk size can be specified in bytes or by using size value notation,
+    e.g. `1g`, `10m`, `5k`. Defaults to `100m`.
+
+`compress`::
+
+    When set to `true`, metadata files are stored in compressed format. This
+    setting doesn't affect index files that are already compressed by default.
+    Defaults to `false`.
+
+`application_name`::
+
+    Name used by the plugin when it uses the Google Cloud JSON API. Setting
+    a custom name can be useful to authenticate your cluster when request
+    statistics are logged in the Google Cloud Platform. Defaults to `repository-gcs`.
diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc
index 08557b9e03e..9a4e90bebd7 100644
--- a/docs/plugins/repository.asciidoc
+++ b/docs/plugins/repository.asciidoc
@@ -22,6 +22,10 @@ The Azure repository plugin adds support for using Azure as a repository.
 
 The Hadoop HDFS Repository plugin adds support for using HDFS as a repository.
 
+<<repository-gcs,Google Cloud Storage Repository Plugin>>::
+
+The GCS repository plugin adds support for using the Google Cloud Storage service as a repository.
+
 [float]
 === Community contributed repository plugins
 
@@ -37,3 +41,4 @@ include::repository-s3.asciidoc[]
 include::repository-hdfs.asciidoc[]
 
+include::repository-gcs.asciidoc[]
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc
index 6c5245ce646..0f87744d317 100644
--- a/docs/reference/modules/snapshots.asciidoc
+++ b/docs/reference/modules/snapshots.asciidoc
@@ -162,6 +162,7 @@ Other repository backends are available in these official plugins:
 * {plugins}/repository-s3.html[repository-s3] for S3 repository support
 * {plugins}/repository-hdfs.html[repository-hdfs] for HDFS repository support in Hadoop environments
 * {plugins}/repository-azure.html[repository-azure] for Azure storage repositories
+* {plugins}/repository-gcs.html[repository-gcs] for Google Cloud Storage repositories
 
 [float]
 ===== Repository Verification
diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle
new file mode 100644
index 00000000000..9968d4408e4
--- /dev/null
+++ b/plugins/repository-gcs/build.gradle
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+esplugin {
+  description 'The GCS repository plugin adds Google Cloud Storage support for repositories.'
+ classname 'org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin' +} + +versions << [ + 'google': '1.21.0', +] + +dependencies { + compile "com.google.apis:google-api-services-storage:v1-rev66-${versions.google}" + compile "com.google.api-client:google-api-client:${versions.google}" + compile "com.google.oauth-client:google-oauth-client:${versions.google}" + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "commons-logging:commons-logging:${versions.commonslogging}" + compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "com.google.http-client:google-http-client:${versions.google}" + compile "com.google.http-client:google-http-client-jackson2:${versions.google}" +} + +dependencyLicenses { + mapping from: /google-.*/, to: 'google' +} + +thirdPartyAudit.excludes = [ + // classes are missing + 'com.google.common.base.Splitter', + 'com.google.common.collect.Lists', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 new file mode 100644 index 00000000000..3fe8682a1b0 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 @@ -0,0 +1 @@ +4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt b/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt new file mode 100644 index 00000000000..56916449bbe --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt @@ -0,0 +1,17 @@ +Apache Commons Codec +Copyright 2002-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java +contains test data from http://aspell.net/test/orig/batch0.tab. +Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org) + +=============================================================================== + +The content of package org.apache.commons.codec.language.bm has been translated +from the original php source code available at http://stevemorse.org/phoneticinfo.htm +with permission from the original authors. 
+Original source copyright: +Copyright (c) 2008 Alexander Beider & Stephen P. Morse. diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 new file mode 100644 index 00000000000..5b8f029e582 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 @@ -0,0 +1 @@ +f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt b/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt new file mode 100644 index 00000000000..57bc88a15a0 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt b/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt new file mode 100644 index 00000000000..72eb32a9024 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons CLI +Copyright 2001-2009 The Apache Software Foundation + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/repository-gcs/licenses/google-LICENSE.txt b/plugins/repository-gcs/licenses/google-LICENSE.txt new file mode 100644 index 00000000000..980a15ac24e --- /dev/null +++ b/plugins/repository-gcs/licenses/google-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-NOTICE.txt b/plugins/repository-gcs/licenses/google-NOTICE.txt new file mode 100644 index 00000000000..8d1c8b69c3f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 new file mode 100644 index 00000000000..56988521028 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-1.21.0.jar.sha1 @@ -0,0 +1 @@ +16a6b3c680f3bf7b81bb42790ff5c1b72c5bbedc \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 new file mode 100644 index 00000000000..2a97aed2b79 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev66-1.21.0.jar.sha1 @@ -0,0 +1 @@ +eb753d716e4f8dec203deb0f8fdca86913a79029 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 new file mode 100644 index 00000000000..401abd444ce --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-1.21.0.jar.sha1 @@ -0,0 +1 @@ +42631630fe1276d4d6d6397bb07d53a4e4fec278 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 new file mode 100644 index 00000000000..e7ca5887412 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.21.0.jar.sha1 @@ -0,0 +1 @@ +8ce17bdd15fff0fd8cf359757f29e778fc7191ad \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 new file mode 100644 index 00000000000..7e3de94a9bc --- /dev/null +++ b/plugins/repository-gcs/licenses/google-oauth-client-1.21.0.jar.sha1 @@ -0,0 +1 @@ +61ec42bbfc51aafde5eb8b4923c602c5b5965bc2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 new file mode 100644 index 00000000000..2c18ef0f54c --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 @@ -0,0 +1 @@ +4c47155e3e6c9a41a28db36680b828ced53b8af4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpclient-LICENSE.txt b/plugins/repository-gcs/licenses/httpclient-LICENSE.txt new file mode 100644 index 00000000000..32f01eda18f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from + +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. 
"Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. 
Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. 
Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. 
+ +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/plugins/repository-gcs/licenses/httpclient-NOTICE.txt b/plugins/repository-gcs/licenses/httpclient-NOTICE.txt new file mode 100644 index 00000000000..4f6058178b2 --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-NOTICE.txt @@ -0,0 +1,5 @@ +Apache HttpComponents Client +Copyright 1999-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 new file mode 100644 index 00000000000..0ad1d24aa9f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 @@ -0,0 +1 @@ +f91b7a4aadc5cf486df6e4634748d7dd7a73f06d \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-LICENSE.txt b/plugins/repository-gcs/licenses/httpcore-LICENSE.txt new file mode 100644 index 00000000000..72819a9f06f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-LICENSE.txt @@ -0,0 +1,241 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project contains annotations in the package org.apache.http.annotation +which are derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. +See http://www.jcip.net and the Creative Commons Attribution License +(http://creativecommons.org/licenses/by/2.5) +Full text: http://creativecommons.org/licenses/by/2.5/legalcode + +License + +THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. + +BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. + +1. Definitions + + "Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. 
+ "Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. + "Licensor" means the individual or entity that offers the Work under the terms of this License. + "Original Author" means the individual or entity who created the Work. + "Work" means the copyrightable work of authorship offered under the terms of this License. + "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. + +2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. + +3. License Grant. Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: + + to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; + to create and reproduce Derivative Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works. + + For the avoidance of doubt, where the work is a musical composition: + Performance Royalties Under Blanket Licenses. Licensor waives the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work. + Mechanical Rights and Statutory Royalties. Licensor waives the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions). + Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor waives the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). 
+ +The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved. + +4. Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: + + You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(b), as requested. + If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. + +5. 
Representations, Warranties and Disclaimer + +UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. + +6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. Termination + + This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. + Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. + +8. Miscellaneous + + Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. + Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. + If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. + This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. 
diff --git a/plugins/repository-gcs/licenses/httpcore-NOTICE.txt b/plugins/repository-gcs/licenses/httpcore-NOTICE.txt new file mode 100644 index 00000000000..c0be50a505e --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-NOTICE.txt @@ -0,0 +1,8 @@ +Apache HttpComponents Core +Copyright 2005-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +This project contains annotations derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. See http://www.jcip.net diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java new file mode 100644 index 00000000000..d8117180ce3 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.common.blobstore.gcs;
+
+import org.elasticsearch.common.blobstore.BlobMetaData;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStoreException;
+import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
+import org.elasticsearch.common.bytes.BytesReference;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+
+public class GoogleCloudStorageBlobContainer extends AbstractBlobContainer {
+
+    private final GoogleCloudStorageBlobStore blobStore;
+    private final String path;
+
+    GoogleCloudStorageBlobContainer(BlobPath path, GoogleCloudStorageBlobStore blobStore) {
+        super(path);
+        this.blobStore = blobStore;
+
+        String keyPath = path.buildAsString("/");
+        // TODO Move this keyPath logic to the buildAsString() method
+        if (!keyPath.isEmpty()) {
+            keyPath = keyPath + "/";
+        }
+        this.path = keyPath;
+    }
+
+    @Override
+    public boolean blobExists(String blobName) {
+        try {
+            return blobStore.blobExists(buildKey(blobName));
+        } catch (Exception e) {
+            throw new BlobStoreException("Failed to check if blob [" + blobName + "] exists", e);
+        }
+    }
+
+    @Override
+    public Map<String, BlobMetaData> listBlobs() throws IOException {
+        return blobStore.listBlobs(path);
+    }
+
+    @Override
+    public Map<String, BlobMetaData> listBlobsByPrefix(String prefix) throws IOException {
+        return blobStore.listBlobsByPrefix(path, prefix);
+    }
+
+    @Override
+    public InputStream readBlob(String blobName) throws IOException {
+        return blobStore.readBlob(buildKey(blobName));
+    }
+
+    @Override
+    public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
+        blobStore.writeBlob(buildKey(blobName), inputStream, blobSize);
+    }
+
+    @Override
+    public void writeBlob(String blobName, BytesReference bytes) throws IOException {
+        writeBlob(blobName, bytes.streamInput(), bytes.length());
+    }
+
+    @Override
+    public void deleteBlob(String blobName) throws IOException {
+        blobStore.deleteBlob(buildKey(blobName));
+    }
+
+    @Override
+    public void deleteBlobsByPrefix(String prefix) throws IOException {
+        blobStore.deleteBlobsByPrefix(buildKey(prefix));
+    }
+
+    @Override
+    public void deleteBlobs(Collection<String> blobNames) throws IOException {
+        blobStore.deleteBlobs(buildKeys(blobNames));
+    }
+
+    @Override
+    public void move(String sourceBlobName, String targetBlobName) throws IOException {
+        blobStore.moveBlob(buildKey(sourceBlobName), buildKey(targetBlobName));
+    }
+
+    protected String buildKey(String blobName) {
+        assert blobName != null;
+        return path + blobName;
+    }
+
+    protected Set<String> buildKeys(Collection<String> blobNames) {
+        Set<String> keys = new HashSet<>();
+        if (blobNames != null) {
+            keys.addAll(blobNames.stream().map(this::buildKey).collect(Collectors.toList()));
+        }
+        return keys;
+    }
+}
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java
new file mode 100644
index 00000000000..7bf79494440
--- /dev/null
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java
@@ -0,0 +1,432 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.blobstore.gcs; + +import com.google.api.client.googleapis.batch.BatchRequest; +import com.google.api.client.googleapis.batch.json.JsonBatchCallback; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.client.http.HttpHeaders; +import com.google.api.client.http.InputStreamContent; +import com.google.api.services.storage.Storage; +import com.google.api.services.storage.model.Bucket; +import com.google.api.services.storage.model.Objects; +import com.google.api.services.storage.model.StorageObject; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.BlobStoreException; +import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.CountDown; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Spliterator; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; + +public class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore { + + /** + * Google Cloud Storage batch requests are limited to 1000 operations + **/ + private static final int MAX_BATCHING_REQUESTS = 999; + + private final Storage client; + private final String bucket; + + public GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storageClient) { + super(settings); + this.bucket = bucket; + this.client = storageClient; + + if (doesBucketExist(bucket) == false) { + throw new BlobStoreException("Bucket [" + bucket + "] does not exist"); + } + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + return new GoogleCloudStorageBlobContainer(path, this); + } + + @Override + public void delete(BlobPath path) throws IOException { + String keyPath = path.buildAsString("/"); + // TODO Move this keyPath logic to the buildAsString() method + if (!keyPath.isEmpty()) 
{ + keyPath = keyPath + "/"; + } + deleteBlobsByPrefix(keyPath); + } + + @Override + public void close() { + } + + /** + * Return true if the given bucket exists + * + * @param bucketName name of the bucket + * @return true if the bucket exists, false otherwise + */ + boolean doesBucketExist(String bucketName) { + try { + return doPrivileged(() -> { + try { + Bucket bucket = client.buckets().get(bucketName).execute(); + if (bucket != null) { + return Strings.hasText(bucket.getId()); + } + } catch (GoogleJsonResponseException e) { + GoogleJsonError error = e.getDetails(); + if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { + return false; + } + throw e; + } + return false; + }); + } catch (IOException e) { + throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); + } + } + + /** + * List all blobs in the bucket + * + * @param path base path of the blobs to list + * @return a map of blob names and their metadata + */ + Map listBlobs(String path) throws IOException { + return doPrivileged(() -> listBlobsByPath(bucket, path, path)); + } + + /** + * List all blobs in the bucket which have a prefix + * + * @param path base path of the blobs to list + * @param prefix prefix of the blobs to list + * @return a map of blob names and their metadata + */ + Map listBlobsByPrefix(String path, String prefix) throws IOException { + return doPrivileged(() -> listBlobsByPath(bucket, buildKey(path, prefix), path)); + } + + /** + * Lists all blobs in a given bucket + * + * @param bucketName name of the bucket + * @param path base path of the blobs to list + * @param pathToRemove if true, this path part is removed from blob name + * @return a map of blob names and their metadata + */ + private Map listBlobsByPath(String bucketName, String path, String pathToRemove) throws IOException { + return blobsStream(client, bucketName, path, MAX_BATCHING_REQUESTS) + .map(new BlobMetaDataConverter(pathToRemove)) + .collect(Collectors.toMap(PlainBlobMetaData::name, Function.identity())); + } + + /** + * Returns true if the blob exists in the bucket + * + * @param blobName name of the blob + * @return true if the blob exists, false otherwise + */ + boolean blobExists(String blobName) throws IOException { + return doPrivileged(() -> { + try { + StorageObject blob = client.objects().get(bucket, blobName).execute(); + if (blob != null) { + return Strings.hasText(blob.getId()); + } + } catch (GoogleJsonResponseException e) { + GoogleJsonError error = e.getDetails(); + if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { + return false; + } + throw e; + } + return false; + }); + } + + /** + * Returns an {@link java.io.InputStream} for a given blob + * + * @param blobName name of the blob + * @return an InputStream + */ + InputStream readBlob(String blobName) throws IOException { + return doPrivileged(() -> { + try { + Storage.Objects.Get object = client.objects().get(bucket, blobName); + return object.executeMediaAsInputStream(); + } catch (GoogleJsonResponseException e) { + GoogleJsonError error = e.getDetails(); + if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { + throw new FileNotFoundException(e.getMessage()); + } + throw e; + } + }); + } + + /** + * Writes a blob in the bucket. 
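+     * @param blobName    name of the blob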
+ * + * @param inputStream content of the blob to be written + * @param blobSize expected size of the blob to be written + */ + void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + doPrivileged(() -> { + InputStreamContent stream = new InputStreamContent(null, inputStream); + stream.setLength(blobSize); + + Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream); + insert.setName(blobName); + insert.execute(); + return null; + }); + } + + /** + * Deletes a blob in the bucket + * + * @param blobName name of the blob + */ + void deleteBlob(String blobName) throws IOException { + doPrivileged(() -> client.objects().delete(bucket, blobName).execute()); + } + + /** + * Deletes multiple blobs in the bucket that have a given prefix + * + * @param prefix prefix of the buckets to delete + */ + void deleteBlobsByPrefix(String prefix) throws IOException { + doPrivileged(() -> { + deleteBlobs(listBlobsByPath(bucket, prefix, null).keySet()); + return null; + }); + } + + /** + * Deletes multiple blobs in the given bucket (uses a batch request to perform this) + * + * @param blobNames names of the bucket to delete + */ + void deleteBlobs(Collection blobNames) throws IOException { + if (blobNames == null || blobNames.isEmpty()) { + return; + } + + if (blobNames.size() == 1) { + deleteBlob(blobNames.iterator().next()); + return; + } + + doPrivileged(() -> { + final List deletions = new ArrayList<>(); + final Iterator blobs = blobNames.iterator(); + + while (blobs.hasNext()) { + // Create a delete request for each blob to delete + deletions.add(client.objects().delete(bucket, blobs.next())); + + if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) { + try { + // Deletions are executed using a batch request + BatchRequest batch = client.batch(); + + // Used to track successful deletions + CountDown countDown = new CountDown(deletions.size()); + + for (Storage.Objects.Delete delete : deletions) { + // Queue the delete request in batch + delete.queue(batch, new JsonBatchCallback() { + @Override + public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { + logger.error("failed to delete blob [{}] in bucket [{}]: {}", delete.getObject(), delete.getBucket(), e + .getMessage()); + } + + @Override + public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException { + countDown.countDown(); + } + }); + } + + batch.execute(); + + if (countDown.isCountedDown() == false) { + throw new IOException("Failed to delete all [" + deletions.size() + "] blobs"); + } + } finally { + deletions.clear(); + } + } + } + return null; + }); + } + + /** + * Moves a blob within the same bucket + * + * @param sourceBlob name of the blob to move + * @param targetBlob new name of the blob in the target bucket + */ + void moveBlob(String sourceBlob, String targetBlob) throws IOException { + doPrivileged(() -> { + // There's no atomic "move" in GCS so we need to copy and delete + client.objects().copy(bucket, sourceBlob, bucket, targetBlob, null).execute(); + client.objects().delete(bucket, sourceBlob).execute(); + return null; + }); + } + + /** + * Executes a {@link PrivilegedExceptionAction} with privileges enabled. 
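+     * <p>
+     * The calling stack may include code that lacks the plugin's permissions, so the methods of this class wrap every call
+     * into the Google client in this helper, for example:
+     * <pre>{@code
+     * StorageObject blob = doPrivileged(() -> client.objects().get(bucket, blobName).execute());
+     * }</pre>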
+ */ + T doPrivileged(PrivilegedExceptionAction operation) throws IOException { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + try { + return AccessController.doPrivileged((PrivilegedExceptionAction) operation::run); + } catch (PrivilegedActionException e) { + throw (IOException) e.getException(); + } + } + + private String buildKey(String keyPath, String s) { + assert s != null; + return keyPath + s; + } + + /** + * Converts a {@link StorageObject} to a {@link PlainBlobMetaData} + */ + class BlobMetaDataConverter implements Function { + + private final String pathToRemove; + + BlobMetaDataConverter(String pathToRemove) { + this.pathToRemove = pathToRemove; + } + + @Override + public PlainBlobMetaData apply(StorageObject storageObject) { + String blobName = storageObject.getName(); + if (Strings.hasLength(pathToRemove)) { + blobName = blobName.substring(pathToRemove.length()); + } + return new PlainBlobMetaData(blobName, storageObject.getSize().longValue()); + } + } + + /** + * Spliterator can be used to list storage objects stored in a bucket. + */ + static class StorageObjectsSpliterator implements Spliterator { + + private final Storage.Objects.List list; + + StorageObjectsSpliterator(Storage client, String bucketName, String prefix, long pageSize) throws IOException { + list = client.objects().list(bucketName); + list.setMaxResults(pageSize); + if (prefix != null) { + list.setPrefix(prefix); + } + } + + @Override + public boolean tryAdvance(Consumer action) { + try { + // Retrieves the next page of items + Objects objects = list.execute(); + + if ((objects == null) || (objects.getItems() == null) || (objects.getItems().isEmpty())) { + return false; + } + + // Consumes all the items + objects.getItems().forEach(action::accept); + + // Sets the page token of the next page, + // null indicates that all items have been consumed + String next = objects.getNextPageToken(); + if (next != null) { + list.setPageToken(next); + return true; + } + + return false; + } catch (Exception e) { + throw new BlobStoreException("Exception while listing objects", e); + } + } + + @Override + public Spliterator trySplit() { + return null; + } + + @Override + public long estimateSize() { + return Long.MAX_VALUE; + } + + @Override + public int characteristics() { + return 0; + } + } + + /** + * Returns a {@link Stream} of {@link StorageObject}s that are stored in a given bucket. + */ + static Stream blobsStream(Storage client, String bucketName, String prefix, long pageSize) throws IOException { + return StreamSupport.stream(new StorageObjectsSpliterator(client, bucketName, prefix, pageSize), false); + } + +} diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java new file mode 100644 index 00000000000..8a4bf88ed74 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugin.repository.gcs; + +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.repositories.gcs.GoogleCloudStorageService; + +public class GoogleCloudStorageModule extends AbstractModule { + + @Override + protected void configure() { + bind(GoogleCloudStorageService.class).to(GoogleCloudStorageService.InternalGoogleCloudStorageService.class).asEagerSingleton(); + } +} diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java new file mode 100644 index 00000000000..477a083fad5 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.plugin.repository.gcs;
+
+import com.google.api.client.auth.oauth2.TokenRequest;
+import com.google.api.client.auth.oauth2.TokenResponse;
+import com.google.api.client.googleapis.json.GoogleJsonError;
+import com.google.api.client.http.GenericUrl;
+import com.google.api.client.http.HttpHeaders;
+import com.google.api.client.json.GenericJson;
+import com.google.api.client.json.webtoken.JsonWebSignature;
+import com.google.api.client.json.webtoken.JsonWebToken;
+import com.google.api.client.util.ClassInfo;
+import com.google.api.client.util.Data;
+import com.google.api.services.storage.Storage;
+import com.google.api.services.storage.model.Bucket;
+import com.google.api.services.storage.model.Objects;
+import com.google.api.services.storage.model.StorageObject;
+import org.elasticsearch.SpecialPermission;
+import org.elasticsearch.common.inject.Module;
+import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.repositories.RepositoriesModule;
+import org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository;
+
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.Collection;
+import java.util.Collections;
+
+public class GoogleCloudStoragePlugin extends Plugin {
+
+    public static final String NAME = "repository-gcs";
+
+    static {
+        /*
+         * Google HTTP client changes access levels because it's silly and we
+         * can't allow that on any old stack so we pull it here, up front,
+         * so we can cleanly check the permissions for it. Without this, changing
+         * the permission can fail if any part of core is on the stack because
+         * our plugin permissions don't allow core to "reach through" plugins to
+         * change the permission. Because that'd be silly.
+         */
+        SecurityManager sm = System.getSecurityManager();
+        if (sm != null) {
+            sm.checkPermission(new SpecialPermission());
+        }
+        AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
+            // ClassInfo caches all the fields of a given class that are
+            // annotated with @Key; at the same time it changes the field
+            // access level using setAccessible(). Calling ClassInfo.of() here
+            // puts the ClassInfo instances in the cache (they are never evicted)
+            // before the SecurityManager is installed.
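+            // The second argument of ClassInfo.of(Class, boolean) is ignoreCase: HttpHeaders is the only type cached
+            // with case-insensitive field names, all the other types keep the default case-sensitive lookup.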
+ ClassInfo.of(HttpHeaders.class, true); + + ClassInfo.of(JsonWebSignature.Header.class, false); + ClassInfo.of(JsonWebToken.Payload.class, false); + + ClassInfo.of(TokenRequest.class, false); + ClassInfo.of(TokenResponse.class, false); + + ClassInfo.of(GenericJson.class, false); + ClassInfo.of(GenericUrl.class, false); + + Data.nullOf(GoogleJsonError.ErrorInfo.class); + ClassInfo.of(GoogleJsonError.class, false); + + Data.nullOf(Bucket.Cors.class); + ClassInfo.of(Bucket.class, false); + ClassInfo.of(Bucket.Cors.class, false); + ClassInfo.of(Bucket.Lifecycle.class, false); + ClassInfo.of(Bucket.Logging.class, false); + ClassInfo.of(Bucket.Owner.class, false); + ClassInfo.of(Bucket.Versioning.class, false); + ClassInfo.of(Bucket.Website.class, false); + + ClassInfo.of(StorageObject.class, false); + ClassInfo.of(StorageObject.Owner.class, false); + + ClassInfo.of(Objects.class, false); + + ClassInfo.of(Storage.Buckets.Get.class, false); + ClassInfo.of(Storage.Buckets.Insert.class, false); + + ClassInfo.of(Storage.Objects.Get.class, false); + ClassInfo.of(Storage.Objects.Insert.class, false); + ClassInfo.of(Storage.Objects.Delete.class, false); + ClassInfo.of(Storage.Objects.Copy.class, false); + ClassInfo.of(Storage.Objects.List.class, false); + + return null; + }); + } + + @Override + public String name() { + return NAME; + } + + @Override + public String description() { + return "Google Cloud Storage Repository Plugin"; + } + + @Override + public Collection nodeModules() { + return Collections.singletonList(new GoogleCloudStorageModule()); + } + + public void onModule(RepositoriesModule repositoriesModule) { + repositoriesModule.registerRepository(GoogleCloudStorageRepository.TYPE, + GoogleCloudStorageRepository.class, BlobStoreIndexShardRepository.class); + } +} diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java new file mode 100644 index 00000000000..337fbcf8d72 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.repositories.gcs;
+
+import com.google.api.services.storage.Storage;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.blobstore.gcs.GoogleCloudStorageBlobStore;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.snapshots.IndexShardRepository;
+import org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin;
+import org.elasticsearch.repositories.RepositoryException;
+import org.elasticsearch.repositories.RepositoryName;
+import org.elasticsearch.repositories.RepositorySettings;
+import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
+
+import java.util.function.Function;
+
+import static org.elasticsearch.common.settings.Setting.Property;
+import static org.elasticsearch.common.settings.Setting.boolSetting;
+import static org.elasticsearch.common.settings.Setting.byteSizeSetting;
+import static org.elasticsearch.common.settings.Setting.simpleString;
+import static org.elasticsearch.common.settings.Setting.timeSetting;
+import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
+
+public class GoogleCloudStorageRepository extends BlobStoreRepository {
+
+    public static final String TYPE = "gcs";
+
+    public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
+
+    public static final Setting<String> BUCKET =
+            simpleString("bucket", Property.NodeScope, Property.Dynamic);
+    public static final Setting<String> BASE_PATH =
+            simpleString("base_path", Property.NodeScope, Property.Dynamic);
+    public static final Setting<Boolean> COMPRESS =
+            boolSetting("compress", false, Property.NodeScope, Property.Dynamic);
+    public static final Setting<ByteSizeValue> CHUNK_SIZE =
+            byteSizeSetting("chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB), Property.NodeScope, Property.Dynamic);
+    public static final Setting<String> APPLICATION_NAME =
+            new Setting<>("application_name", GoogleCloudStoragePlugin.NAME, Function.identity(), Property.NodeScope, Property.Dynamic);
+    public static final Setting<String> SERVICE_ACCOUNT =
+            simpleString("service_account", Property.NodeScope, Property.Dynamic, Property.Filtered);
+    public static final Setting<TimeValue> HTTP_READ_TIMEOUT =
+            timeSetting("http.read_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic);
+    public static final Setting<TimeValue> HTTP_CONNECT_TIMEOUT =
+            timeSetting("http.connect_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic);
+
+    private final ByteSizeValue chunkSize;
+    private final boolean compress;
+    private final BlobPath basePath;
+    private final GoogleCloudStorageBlobStore blobStore;
+
+    @Inject
+    public GoogleCloudStorageRepository(RepositoryName repositoryName, RepositorySettings repositorySettings,
+                                        IndexShardRepository indexShardRepository,
+                                        GoogleCloudStorageService storageService) throws Exception {
+        super(repositoryName.getName(), repositorySettings, indexShardRepository);
+
+        String bucket = get(BUCKET, repositoryName, repositorySettings);
+        String application = get(APPLICATION_NAME, repositoryName, repositorySettings);
+        String serviceAccount = get(SERVICE_ACCOUNT, repositoryName, repositorySettings);
+
+        String basePath = BASE_PATH.get(repositorySettings.settings());
+        if (Strings.hasLength(basePath)) {
+            BlobPath path = new BlobPath();
+            for (String
elem : basePath.split("/")) { + path = path.add(elem); + } + this.basePath = path; + } else { + this.basePath = BlobPath.cleanPath(); + } + + TimeValue connectTimeout = null; + TimeValue readTimeout = null; + + TimeValue timeout = HTTP_CONNECT_TIMEOUT.get(repositorySettings.settings()); + if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) { + connectTimeout = timeout; + } + + timeout = HTTP_READ_TIMEOUT.get(repositorySettings.settings()); + if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) { + readTimeout = timeout; + } + + this.compress = get(COMPRESS, repositoryName, repositorySettings); + this.chunkSize = get(CHUNK_SIZE, repositoryName, repositorySettings); + + logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}], application [{}]", + bucket, basePath, chunkSize, compress, application); + + Storage client = storageService.createClient(serviceAccount, application, connectTimeout, readTimeout); + this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client); + } + + + @Override + protected BlobStore blobStore() { + return blobStore; + } + + @Override + protected BlobPath basePath() { + return basePath; + } + + @Override + protected boolean isCompress() { + return compress; + } + + @Override + protected ByteSizeValue chunkSize() { + return chunkSize; + } + + /** + * Get a given setting from the repository settings, throwing a {@link RepositoryException} if the setting does not exist or is empty. + */ + static T get(Setting setting, RepositoryName name, RepositorySettings repositorySettings) { + T value = setting.get(repositorySettings.settings()); + if (value == null) { + throw new RepositoryException(name.getName(), "Setting [" + setting.getKey() + "] is not defined for repository"); + } + if ((value instanceof String) && (Strings.hasText((String) value)) == false) { + throw new RepositoryException(name.getName(), "Setting [" + setting.getKey() + "] is empty for repository"); + } + return value; + } +} diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java new file mode 100644 index 00000000000..098ce5f1504 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -0,0 +1,176 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.repositories.gcs; + +import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; +import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; +import com.google.api.client.http.HttpBackOffIOExceptionHandler; +import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; +import com.google.api.client.http.HttpIOExceptionHandler; +import com.google.api.client.http.HttpRequest; +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpUnsuccessfulResponseHandler; +import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.util.ExponentialBackOff; +import com.google.api.services.storage.Storage; +import com.google.api.services.storage.StorageScopes; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; + +public interface GoogleCloudStorageService { + + /** + * Creates a client that can be used to manage Google Cloud Storage objects. + * + * @param serviceAccount path to service account file + * @param application name of the application + * @param connectTimeout connection timeout for HTTP requests + * @param readTimeout read timeout for HTTP requests + * @return a Client instance that can be used to manage objects + */ + Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout) throws Exception; + + /** + * Default implementation + */ + class InternalGoogleCloudStorageService extends AbstractComponent implements GoogleCloudStorageService { + + private static final String DEFAULT = "_default_"; + + private final Environment environment; + + @Inject + public InternalGoogleCloudStorageService(Settings settings, Environment environment) { + super(settings); + this.environment = environment; + } + + @Override + public Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout) + throws Exception { + try { + GoogleCredential credentials = (DEFAULT.equalsIgnoreCase(serviceAccount)) ? 
loadDefault() : loadCredentials(serviceAccount); + NetHttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport(); + + Storage.Builder storage = new Storage.Builder(httpTransport, JacksonFactory.getDefaultInstance(), + new DefaultHttpRequestInitializer(credentials, connectTimeout, readTimeout)); + storage.setApplicationName(application); + + logger.debug("initializing client with service account [{}/{}]", + credentials.getServiceAccountId(), credentials.getServiceAccountUser()); + return storage.build(); + } catch (IOException e) { + throw new ElasticsearchException("Error when loading Google Cloud Storage credentials file", e); + } + } + + /** + * HTTP request initializer that loads credentials from the service account file + * and manages authentication for HTTP requests + */ + private GoogleCredential loadCredentials(String serviceAccount) throws IOException { + if (serviceAccount == null) { + throw new ElasticsearchException("Cannot load Google Cloud Storage service account file from a null path"); + } + + Path account = environment.configFile().resolve(serviceAccount); + if (Files.exists(account) == false) { + throw new ElasticsearchException("Unable to find service account file [" + serviceAccount + + "] defined for repository"); + } + + try (InputStream is = Files.newInputStream(account)) { + GoogleCredential credential = GoogleCredential.fromStream(is); + if (credential.createScopedRequired()) { + credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + } + return credential; + } + } + + /** + * HTTP request initializer that loads default credentials when running on Compute Engine + */ + private GoogleCredential loadDefault() throws IOException { + return GoogleCredential.getApplicationDefault(); + } + + /** + * HTTP request initializer that set timeouts and backoff handler while deferring authentication to GoogleCredential. + * See https://cloud.google.com/storage/transfer/create-client#retry + */ + class DefaultHttpRequestInitializer implements HttpRequestInitializer { + + private final TimeValue connectTimeout; + private final TimeValue readTimeout; + private final GoogleCredential credential; + private final HttpUnsuccessfulResponseHandler handler; + private final HttpIOExceptionHandler ioHandler; + + DefaultHttpRequestInitializer(GoogleCredential credential, TimeValue connectTimeout, TimeValue readTimeout) { + this.credential = credential; + this.connectTimeout = connectTimeout; + this.readTimeout = readTimeout; + this.handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff()); + this.ioHandler = new HttpBackOffIOExceptionHandler(newBackOff()); + } + + @Override + public void initialize(HttpRequest request) throws IOException { + if (connectTimeout != null) { + request.setConnectTimeout((int) connectTimeout.millis()); + } + if (readTimeout != null) { + request.setReadTimeout((int) readTimeout.millis()); + } + + request.setIOExceptionHandler(ioHandler); + request.setInterceptor(credential); + + request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> { + // Let the credential handle the response. 
If it failed, we rely on our backoff handler + return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry); + } + ); + } + + private ExponentialBackOff newBackOff() { + return new ExponentialBackOff.Builder() + .setInitialIntervalMillis(100) + .setMaxIntervalMillis(6000) + .setMaxElapsedTimeMillis(900000) + .setMultiplier(1.5) + .setRandomizationFactor(0.5) + .build(); + } + } + } +} diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000000..bc7acd60602 --- /dev/null +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,26 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission java.lang.RuntimePermission "setFactory"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.net.URLPermission "http://www.googleapis.com/*", "*"; + permission java.net.URLPermission "https://www.googleapis.com/*", "*"; +}; diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java new file mode 100644 index 00000000000..4fe8c718345 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.blobstore.gcs; + +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; + +import java.io.IOException; +import java.util.Locale; + +public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase { + + @Override + protected BlobStore newBlobStore() throws IOException { + String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockHttpTransport.newStorage(bucket, getTestName())); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java new file mode 100644 index 00000000000..b5489466b51 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.blobstore.gcs; + +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.repositories.ESBlobStoreTestCase; + +import java.io.IOException; +import java.util.Locale; + +public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase { + + @Override + protected BlobStore newBlobStore() throws IOException { + String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockHttpTransport.newStorage(bucket, getTestName())); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java new file mode 100644 index 00000000000..196fcf12f87 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/MockHttpTransport.java @@ -0,0 +1,432 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.blobstore.gcs; + +import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.LowLevelHttpRequest; +import com.google.api.client.http.LowLevelHttpResponse; +import com.google.api.client.json.Json; +import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.testing.http.MockLowLevelHttpRequest; +import com.google.api.client.testing.http.MockLowLevelHttpResponse; +import com.google.api.services.storage.Storage; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.path.PathTrie; +import org.elasticsearch.common.util.Callback; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.support.RestUtils; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +/** + * Mock for {@link HttpTransport} to test Google Cloud Storage service. + *

+ * This basically handles each type of request used by the {@link GoogleCloudStorageBlobStore} and provides appropriate responses like + * the Google Cloud Storage service would do. It is largely based on official documentation available at https://cloud.google + * .com/storage/docs/json_api/v1/. + */ +public class MockHttpTransport extends com.google.api.client.testing.http.MockHttpTransport { + + private final AtomicInteger objectsCount = new AtomicInteger(0); + private final Map objectsNames = ConcurrentCollections.newConcurrentMap(); + private final Map objectsContent = ConcurrentCollections.newConcurrentMap(); + + private final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); + + public MockHttpTransport(String bucket) { + + // GET Bucket + // + // https://cloud.google.com/storage/docs/json_api/v1/buckets/get + handlers.insert("GET https://www.googleapis.com/storage/v1/b/{bucket}", (url, params, req) -> { + String name = params.get("bucket"); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "bucket name is missing"); + } + + if (name.equals(bucket)) { + return newMockResponse().setContent(buildBucketResource(bucket)); + } else { + return newMockError(RestStatus.NOT_FOUND, "bucket not found"); + } + }); + + // GET Object + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/get + handlers.insert("GET https://www.googleapis.com/storage/v1/b/{bucket}/o/{object}", (url, params, req) -> { + String name = params.get("object"); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + + for (Map.Entry object : objectsNames.entrySet()) { + if (object.getValue().equals(name)) { + byte[] content = objectsContent.get(object.getKey()); + if (content != null) { + return newMockResponse().setContent(buildObjectResource(bucket, name, object.getKey(), content.length)); + } + } + } + return newMockError(RestStatus.NOT_FOUND, "object not found"); + }); + + // Download Object + // + // https://cloud.google.com/storage/docs/request-endpoints + handlers.insert("GET https://www.googleapis.com/download/storage/v1/b/{bucket}/o/{object}", (url, params, req) -> { + String name = params.get("object"); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + + for (Map.Entry object : objectsNames.entrySet()) { + if (object.getValue().equals(name)) { + byte[] content = objectsContent.get(object.getKey()); + if (content == null) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object content is missing"); + } + return newMockResponse().setContent(new ByteArrayInputStream(content)); + } + } + return newMockError(RestStatus.NOT_FOUND, "object not found"); + }); + + // Insert Object (initialization) + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/insert + handlers.insert("POST https://www.googleapis.com/upload/storage/v1/b/{bucket}/o", (url, params, req) -> { + if ("resumable".equals(params.get("uploadType")) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable"); + } + + String name = params.get("name"); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + + String objectId = String.valueOf(objectsCount.getAndIncrement()); + objectsNames.put(objectId, name); + + return newMockResponse() + .setStatusCode(RestStatus.CREATED.getStatus()) + 
.addHeader("Location", "https://www.googleapis.com/upload/storage/v1/b/" + bucket + + "/o?uploadType=resumable&upload_id=" + objectId); + }); + + // Insert Object (upload) + // + // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload + handlers.insert("PUT https://www.googleapis.com/upload/storage/v1/b/{bucket}/o", (url, params, req) -> { + String objectId = params.get("upload_id"); + if (Strings.hasText(objectId) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "upload id is missing"); + } + + String name = objectsNames.get(objectId); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.NOT_FOUND, "object name not found"); + } + + ByteArrayOutputStream os = new ByteArrayOutputStream((int) req.getContentLength()); + try { + req.getStreamingContent().writeTo(os); + os.close(); + } catch (IOException e) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); + } + + byte[] content = os.toByteArray(); + objectsContent.put(objectId, content); + return newMockResponse().setContent(buildObjectResource(bucket, name, objectId, content.length)); + }); + + // List Objects + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/list + handlers.insert("GET https://www.googleapis.com/storage/v1/b/{bucket}/o", (url, params, req) -> { + String prefix = params.get("prefix"); + + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + builder.field("kind", "storage#objects"); + builder.startArray("items"); + for (Map.Entry o : objectsNames.entrySet()) { + if (prefix != null && o.getValue().startsWith(prefix) == false) { + continue; + } + buildObjectResource(builder, bucket, o.getValue(), o.getKey(), objectsContent.get(o.getKey()).length); + } + builder.endArray(); + builder.endObject(); + return newMockResponse().setContent(builder.string()); + } catch (IOException e) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); + } + }); + + // Delete Object + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/delete + handlers.insert("DELETE https://www.googleapis.com/storage/v1/b/{bucket}/o/{object}", (url, params, req) -> { + String name = params.get("object"); + if (Strings.hasText(name) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + + String objectId = null; + for (Map.Entry object : objectsNames.entrySet()) { + if (object.getValue().equals(name)) { + objectId = object.getKey(); + break; + } + } + + if (objectId != null) { + objectsNames.remove(objectId); + objectsContent.remove(objectId); + return newMockResponse().setStatusCode(RestStatus.NO_CONTENT.getStatus()); + } + return newMockError(RestStatus.NOT_FOUND, "object not found"); + }); + + // Copy Object + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/copy + handlers.insert("POST https://www.googleapis.com/storage/v1/b/{srcBucket}/o/{srcObject}/copyTo/b/{destBucket}/o/{destObject}", + (url, params, req) -> { + String source = params.get("srcObject"); + if (Strings.hasText(source) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); + } + + String dest = params.get("destObject"); + if (Strings.hasText(dest) == false) { + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); + } + + String srcObjectId = null; + for (Map.Entry object : objectsNames.entrySet()) { + if (object.getValue().equals(source)) { + srcObjectId = object.getKey(); + break; + } + } + 
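+                // Like the real service, copying an unknown source object returns a 404, while a successful copy
+                // materializes the destination under a brand new object id.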
+ if (srcObjectId == null) { + return newMockError(RestStatus.NOT_FOUND, "source object not found"); + } + + byte[] content = objectsContent.get(srcObjectId); + if (content == null) { + return newMockError(RestStatus.NOT_FOUND, "source content can not be found"); + } + + String destObjectId = String.valueOf(objectsCount.getAndIncrement()); + objectsNames.put(destObjectId, dest); + objectsContent.put(destObjectId, content); + + return newMockResponse().setContent(buildObjectResource(bucket, dest, destObjectId, content.length)); + }); + + // Batch + // + // https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch + handlers.insert("POST https://www.googleapis.com/batch", (url, params, req) -> { + List responses = new ArrayList<>(); + + // A batch request body looks like this: + // + // --__END_OF_PART__ + // Content-Length: 71 + // Content-Type: application/http + // content-id: 1 + // content-transfer-encoding: binary + // + // DELETE https://www.googleapis.com/storage/v1/b/ohifkgu/o/foo%2Ftest + // + // + // --__END_OF_PART__ + // Content-Length: 71 + // Content-Type: application/http + // content-id: 2 + // content-transfer-encoding: binary + // + // DELETE https://www.googleapis.com/storage/v1/b/ohifkgu/o/bar%2Ftest + // + // + // --__END_OF_PART__-- + + // Here we simply process the request body line by line and delegate to other handlers + // if possible. + try (ByteArrayOutputStream os = new ByteArrayOutputStream((int) req.getContentLength())) { + req.getStreamingContent().writeTo(os); + + Streams.readAllLines(new ByteArrayInputStream(os.toByteArray()), new Callback() { + @Override + public void handle(String line) { + Handler handler = handlers.retrieve(line, params); + if (handler != null) { + try { + responses.add(handler.execute(line, params, req)); + } catch (IOException e) { + responses.add(newMockError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + } + } + }); + } + + // Now we can build the response + String boundary = "__END_OF_PART__"; + String sep = "--"; + String line = "\r\n"; + + StringBuilder builder = new StringBuilder(); + for (MockLowLevelHttpResponse resp : responses) { + builder.append(sep).append(boundary).append(line); + builder.append(line); + builder.append("HTTP/1.1 ").append(resp.getStatusCode()).append(' ').append(resp.getReasonPhrase()).append(line); + builder.append("Content-Length: ").append(resp.getContentLength()).append(line); + builder.append(line); + } + builder.append(line); + builder.append(sep).append(boundary).append(sep); + + return newMockResponse().setContentType("multipart/mixed; boundary=" + boundary).setContent(builder.toString()); + }); + } + + @Override + public LowLevelHttpRequest buildRequest(String method, String url) throws IOException { + return new MockLowLevelHttpRequest() { + @Override + public LowLevelHttpResponse execute() throws IOException { + String rawPath = url; + Map params = new HashMap<>(); + + int pathEndPos = url.indexOf('?'); + if (pathEndPos != -1) { + rawPath = url.substring(0, pathEndPos); + RestUtils.decodeQueryString(url, pathEndPos + 1, params); + } + + Handler handler = handlers.retrieve(method + " " + rawPath, params); + if (handler != null) { + return handler.execute(rawPath, params, this); + } + return newMockError(RestStatus.INTERNAL_SERVER_ERROR, "Unable to handle request [method=" + method + ", url=" + url + "]"); + } + }; + } + + private static MockLowLevelHttpResponse newMockResponse() { + return new MockLowLevelHttpResponse() + .setContentType(Json.MEDIA_TYPE) + 
.setStatusCode(RestStatus.OK.getStatus()) + .setReasonPhrase(RestStatus.OK.name()); + } + + private static MockLowLevelHttpResponse newMockError(RestStatus status, String message) { + MockLowLevelHttpResponse response = newMockResponse().setStatusCode(status.getStatus()).setReasonPhrase(status.name()); + try { + response.setContent(buildErrorResource(status, message)); + } catch (IOException e) { + response.setContent("Failed to build error resource [" + message + "] because of: " + e.getMessage()); + } + return response; + } + + /** + * Storage Error JSON representation + */ + private static String buildErrorResource(RestStatus status, String message) throws IOException { + return jsonBuilder() + .startObject() + .startObject("error") + .field("code", status.getStatus()) + .field("message", message) + .startArray("errors") + .startObject() + .field("domain", "global") + .field("reason", status.toString()) + .field("message", message) + .endObject() + .endArray() + .endObject() + .endObject() + .string(); + } + + /** + * Storage Bucket JSON representation as defined in + * https://cloud.google.com/storage/docs/json_api/v1/bucket#resource + */ + private static String buildBucketResource(String name) throws IOException { + return jsonBuilder().startObject() + .field("kind", "storage#bucket") + .field("id", name) + .endObject() + .string(); + } + + /** + * Storage Object JSON representation as defined in + * https://cloud.google.com/storage/docs/json_api/v1/objects#resource + */ + private static XContentBuilder buildObjectResource(XContentBuilder builder, String bucket, String name, String id, int size) + throws IOException { + return builder.startObject() + .field("kind", "storage#object") + .field("id", String.join("/", bucket, name, id)) + .field("name", name) + .field("size", String.valueOf(size)) + .endObject(); + } + + private static String buildObjectResource(String bucket, String name, String id, int size) throws IOException { + return buildObjectResource(jsonBuilder(), bucket, name, id, size).string(); + } + + interface Handler { + MockLowLevelHttpResponse execute(String url, Map params, MockLowLevelHttpRequest request) throws IOException; + } + + /** + * Instanciates a mocked Storage client for tests. + */ + public static Storage newStorage(String bucket, String applicationName) { + return new Storage.Builder(new MockHttpTransport(bucket), new JacksonFactory(), null) + .setApplicationName(applicationName) + .build(); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java new file mode 100644 index 00000000000..c5b57ba6cd6 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.gcs; + +import com.google.api.services.storage.Storage; +import org.elasticsearch.common.blobstore.gcs.MockHttpTransport; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.plugin.repository.gcs.GoogleCloudStorageModule; +import org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.ESBlobStoreRepositoryIntegTestCase; +import org.junit.BeforeClass; + +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase { + + private static final String BUCKET = "gcs-repository-test"; + + // Static storage client shared among all nodes in order to act like a remote repository service: + // all nodes must see the same content + private static final AtomicReference storage = new AtomicReference<>(); + + @Override + protected Collection> nodePlugins() { + return pluginList(MockGoogleCloudStoragePlugin.class); + } + + @Override + protected void createTestRepository(String name) { + assertAcked(client().admin().cluster().preparePutRepository(name) + .setType(GoogleCloudStorageRepository.TYPE) + .setSettings(Settings.builder() + .put("bucket", BUCKET) + .put("base_path", GoogleCloudStorageBlobStoreRepositoryTests.class.getSimpleName()) + .put("service_account", "_default_") + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); + } + + @BeforeClass + public static void setUpStorage() { + storage.set(MockHttpTransport.newStorage(BUCKET, GoogleCloudStorageBlobStoreRepositoryTests.class.getName())); + } + + public static class MockGoogleCloudStoragePlugin extends GoogleCloudStoragePlugin { + + public MockGoogleCloudStoragePlugin() { + } + + @Override + public Collection nodeModules() { + return Collections.singletonList(new MockGoogleCloudStorageModule()); + } + } + + public static class MockGoogleCloudStorageModule extends GoogleCloudStorageModule { + @Override + protected void configure() { + bind(GoogleCloudStorageService.class).to(MockGoogleCloudStorageService.class).asEagerSingleton(); + } + } + + public static class MockGoogleCloudStorageService implements GoogleCloudStorageService { + + @Override + public Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout) throws + Exception { + return storage.get(); + } + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java new file mode 100644 index 00000000000..18862d05aa0 --- /dev/null +++ 
b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepositoryRestIT.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.gcs; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; + +public class GoogleCloudStorageRepositoryRestIT extends ESRestTestCase { + + public GoogleCloudStorageRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return createParameters(0, 1); + } +} + diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml new file mode 100644 index 00000000000..a37fb779549 --- /dev/null +++ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yaml @@ -0,0 +1,13 @@ +# Integration tests for Repository GCS component +# +"Repository GCS loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: repository-gcs } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index bd5cd499015..07fea76bd8b 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -289,6 +289,10 @@ fi install_and_check_plugin repository azure azure-storage-*.jar } +@test "[$GROUP] install repository-gcs plugin" { + install_and_check_plugin repository gcs google-api-services-storage-*.jar +} + @test "[$GROUP] install repository-s3 plugin" { install_and_check_plugin repository s3 aws-java-sdk-core-*.jar } @@ -387,6 +391,10 @@ fi remove_plugin repository-azure } +@test "[$GROUP] remove repository-gcs plugin" { + remove_plugin repository-gcs +} + @test "[$GROUP] remove repository-hdfs plugin" { remove_plugin repository-hdfs } diff --git a/settings.gradle b/settings.gradle index 3a8b0f66210..88217a9dde5 100644 --- a/settings.gradle +++ b/settings.gradle @@ -37,6 +37,7 @@ List projects = [ 'plugins:mapper-murmur3', 'plugins:mapper-size', 'plugins:repository-azure', + 'plugins:repository-gcs', 'plugins:repository-hdfs', 
'plugins:repository-s3', 'plugins:jvm-example', diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java similarity index 94% rename from core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java rename to test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 291d00a8dde..8462cf007f0 100644 --- a/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test; +package org.elasticsearch.repositories; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; @@ -25,6 +25,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.InputStream; @@ -32,9 +33,9 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.test.ESBlobStoreTestCase.writeRandomBlob; -import static org.elasticsearch.test.ESBlobStoreTestCase.randomBytes; -import static org.elasticsearch.test.ESBlobStoreTestCase.readBlobFully; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.writeRandomBlob; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.readBlobFully; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreRepositoryIntegTestCase.java similarity index 91% rename from core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java rename to test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreRepositoryIntegTestCase.java index dc49683de63..2ffd30fa470 100644 --- a/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreRepositoryIntegTestCase.java @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test; +package org.elasticsearch.repositories; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; import java.util.HashSet; @@ -59,7 +60,8 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase String snapshotName = randomAsciiName(); logger.info("--> create snapshot {}:{}", repoName, snapshotName); - assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName).setWaitForCompletion(true).setIndices(indexNames)); + assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName) + .setWaitForCompletion(true).setIndices(indexNames)); List deleteIndices = randomSubsetOf(randomIntBetween(0, indexCount), indexNames); if (deleteIndices.size() > 0) { @@ -99,6 +101,9 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase for (int i = 0; i < indexCount; i++) { assertHitCount(client().prepareSearch(indexNames[i]).setSize(0).get(), docCounts[i]); } + + logger.info("--> delete snapshot {}:{}", repoName, snapshotName); + assertAcked(client().admin().cluster().prepareDeleteSnapshot(repoName, snapshotName).get()); } public void testMultipleSnapshotAndRollback() throws Exception { @@ -130,7 +135,8 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase // Check number of documents in this iteration docCounts[i] = (int) client().prepareSearch(indexName).setSize(0).get().getHits().totalHits(); logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); - assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName)); + assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName + "-" + i) + .setWaitForCompletion(true).setIndices(indexName)); } int restoreOperations = randomIntBetween(1, 3); @@ -142,10 +148,17 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase assertAcked(client().admin().indices().prepareClose(indexName)); logger.info("--> restore index from the snapshot"); - assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName + "-" + iterationToRestore).setWaitForCompletion(true)); + assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName + "-" + iterationToRestore) + .setWaitForCompletion(true)); + ensureGreen(); assertHitCount(client().prepareSearch(indexName).setSize(0).get(), docCounts[iterationToRestore]); } + + for (int i = 0; i < iterationCount; i++) { + logger.info("--> delete snapshot {}:{}", repoName, snapshotName + "-" + i); + assertAcked(client().admin().cluster().prepareDeleteSnapshot(repoName, snapshotName + "-" + i).get()); + } } protected void addRandomDocuments(String name, int numDocs) throws ExecutionException, InterruptedException { diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java similarity index 97% rename from core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java rename to test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java index 80432d628ef..be7431795b2 100644 --- a/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java @@ -16,12 +16,13 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test; +package org.elasticsearch.repositories; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; import org.junit.Test; import java.io.IOException; From fc6df23feab09c55c93b27e727ef78ba772f5586 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Thu, 19 May 2016 09:24:45 -0400 Subject: [PATCH 26/36] Rename AggregatorBuilder and all of its subclasses to AggregationBuilder, in keeping consistent with the Java APIs. Closes #18377 Closes #18367 --- .../percolate/PercolateRequestBuilder.java | 6 +- .../percolate/PercolateSourceBuilder.java | 8 +- .../action/search/SearchRequestBuilder.java | 4 +- .../elasticsearch/search/SearchModule.java | 170 +++++++-------- ...orBuilder.java => AggregationBuilder.java} | 14 +- .../aggregations/AggregationBuilders.java | 202 +++++++++--------- .../search/aggregations/Aggregator.java | 4 +- .../aggregations/AggregatorFactories.java | 50 ++--- .../aggregations/AggregatorParsers.java | 2 +- ...r.java => ChildrenAggregationBuilder.java} | 16 +- ...der.java => FilterAggregationBuilder.java} | 17 +- ...er.java => FiltersAggregationBuilder.java} | 28 +-- ...er.java => GeoGridAggregationBuilder.java} | 16 +- .../bucket/geogrid/GeoHashGridAggregator.java | 4 +- .../geogrid/GeoHashGridAggregatorFactory.java | 2 +- .../bucket/geogrid/GeoHashGridParser.java | 4 +- ...der.java => GlobalAggregationBuilder.java} | 14 +- .../histogram/AbstractHistogramBuilder.java | 6 +- ...a => DateHistogramAggregationBuilder.java} | 18 +- .../bucket/histogram/DateHistogramParser.java | 8 +- ....java => HistogramAggregationBuilder.java} | 8 +- .../bucket/histogram/HistogramParser.java | 2 +- ...er.java => MissingAggregationBuilder.java} | 10 +- .../bucket/missing/MissingParser.java | 6 +- ...der.java => NestedAggregationBuilder.java} | 16 +- ...a => ReverseNestedAggregationBuilder.java} | 18 +- .../bucket/range/AbstractRangeBuilder.java | 6 +- ...lder.java => RangeAggregationBuilder.java} | 20 +- .../bucket/range/RangeParser.java | 2 +- ....java => DateRangeAggregationBuilder.java} | 42 ++-- .../bucket/range/date/DateRangeParser.java | 6 +- ...ava => GeoDistanceAggregationBuilder.java} | 36 ++-- .../range/geodistance/GeoDistanceParser.java | 6 +- ...er.java => IpRangeAggregationBuilder.java} | 34 +-- .../bucket/range/ip/IpRangeParser.java | 8 +- ...ava => DiversifiedAggregationBuilder.java} | 20 +- .../sampler/DiversifiedSamplerParser.java | 6 +- ...er.java => SamplerAggregationBuilder.java} | 18 +- ...> SignificantTermsAggregationBuilder.java} | 32 +-- .../SignificantTermsAggregatorFactory.java | 2 +- .../significant/SignificantTermsParser.java | 22 +- .../significant/UnmappedSignificantTerms.java | 2 +- .../bucket/terms/AbstractTermsParser.java | 20 +- ...lder.java => 
TermsAggregationBuilder.java} | 34 +-- .../bucket/terms/TermsAggregator.java | 8 +- .../bucket/terms/TermsAggregatorFactory.java | 2 +- .../bucket/terms/TermsParser.java | 27 +-- ...uilder.java => AvgAggregationBuilder.java} | 10 +- .../aggregations/metrics/avg/AvgParser.java | 6 +- ...ava => CardinalityAggregationBuilder.java} | 14 +- .../cardinality/CardinalityParser.java | 12 +- ....java => GeoBoundsAggregationBuilder.java} | 14 +- .../metrics/geobounds/GeoBoundsParser.java | 6 +- ...ava => GeoCentroidAggregationBuilder.java} | 12 +- .../geocentroid/GeoCentroidParser.java | 6 +- ...uilder.java => MaxAggregationBuilder.java} | 10 +- .../aggregations/metrics/max/MaxParser.java | 6 +- ...uilder.java => MinAggregationBuilder.java} | 10 +- .../aggregations/metrics/min/MinParser.java | 6 +- .../AbstractPercentilesParser.java | 14 +- ...=> PercentileRanksAggregationBuilder.java} | 20 +- .../percentiles/PercentileRanksParser.java | 9 +- ...ava => PercentilesAggregationBuilder.java} | 20 +- .../percentiles/PercentilesParser.java | 9 +- ... => ScriptedMetricAggregationBuilder.java} | 26 +-- ...lder.java => StatsAggregationBuilder.java} | 10 +- .../metrics/stats/StatsParser.java | 6 +- ...a => ExtendedStatsAggregationBuilder.java} | 16 +- .../stats/extended/ExtendedStatsParser.java | 6 +- ...uilder.java => SumAggregationBuilder.java} | 10 +- .../aggregations/metrics/sum/SumParser.java | 6 +- ...er.java => TopHitsAggregationBuilder.java} | 62 +++--- ...java => ValueCountAggregationBuilder.java} | 10 +- .../metrics/valuecount/ValueCountParser.java | 6 +- .../support/AbstractValuesSourceParser.java | 10 +- ...va => ValuesSourceAggregationBuilder.java} | 18 +- .../search/builder/SearchSourceBuilder.java | 4 +- .../aggregations/BaseAggregationTestCase.java | 8 +- .../aggregations/bucket/ChildrenTests.java | 8 +- .../bucket/DateHistogramTests.java | 8 +- .../aggregations/bucket/DateRangeIT.java | 4 +- .../aggregations/bucket/DateRangeTests.java | 8 +- .../bucket/DiversifiedSamplerIT.java | 18 +- .../bucket/DiversifiedSamplerTests.java | 8 +- .../bucket/GeoDistanceRangeTests.java | 8 +- .../aggregations/bucket/GeoHashGridTests.java | 8 +- .../aggregations/bucket/GlobalTests.java | 8 +- .../aggregations/bucket/HistogramTests.java | 8 +- .../search/aggregations/bucket/IpRangeIT.java | 1 - .../aggregations/bucket/IpRangeTests.java | 8 +- .../aggregations/bucket/NaNSortingIT.java | 20 +- .../aggregations/bucket/RangeTests.java | 8 +- .../search/aggregations/bucket/SamplerIT.java | 8 +- .../aggregations/bucket/SamplerTests.java | 8 +- .../bucket/SignificantTermsTests.java | 8 +- .../aggregations/bucket/TermsTests.java | 8 +- .../bucket/nested/NestedAggregatorTests.java | 2 +- .../bucket/nested/NestedTests.java | 6 +- .../bucket/nested/ReverseNestedTests.java | 6 +- .../SignificanceHeuristicTests.java | 4 +- .../AbstractNumericMetricTestCase.java | 4 +- .../search/aggregations/metrics/AvgTests.java | 8 +- .../metrics/ExtendedStatsTests.java | 8 +- .../aggregations/metrics/FilterTests.java | 8 +- .../aggregations/metrics/FiltersTests.java | 16 +- .../aggregations/metrics/GeoBoundsTests.java | 8 +- .../metrics/GeoCentroidTests.java | 8 +- .../search/aggregations/metrics/MaxTests.java | 8 +- .../search/aggregations/metrics/MinTests.java | 8 +- .../aggregations/metrics/MissingTests.java | 8 +- .../metrics/PercentileRanksTests.java | 8 +- .../metrics/PercentilesTests.java | 8 +- .../metrics/ScriptedMetricTests.java | 8 +- .../aggregations/metrics/StatsTests.java | 8 +- .../search/aggregations/metrics/SumTests.java 
| 8 +- .../aggregations/metrics/TopHitsTests.java | 8 +- .../aggregations/metrics/ValueCountTests.java | 8 +- .../metrics/cardinality/CardinalityTests.java | 6 +- .../PipelineAggregationHelperTests.java | 20 +- .../pipeline/moving/avg/MovAvgIT.java | 8 +- .../pipeline/serialdiff/SerialDiffIT.java | 6 +- .../messy/tests/EquivalenceTests.java | 4 +- .../messy/tests/MinDocCountTests.java | 8 +- .../tests/TDigestPercentileRanksTests.java | 6 +- .../messy/tests/TDigestPercentilesTests.java | 6 +- 125 files changed, 890 insertions(+), 875 deletions(-) rename core/src/main/java/org/elasticsearch/search/aggregations/{AggregatorBuilder.java => AggregationBuilder.java} (93%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/{ChildrenAggregatorBuilder.java => ChildrenAggregationBuilder.java} (92%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/{FilterAggregatorBuilder.java => FilterAggregationBuilder.java} (86%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/{FiltersAggregatorBuilder.java => FiltersAggregationBuilder.java} (92%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/{GeoGridAggregatorBuilder.java => GeoGridAggregationBuilder.java} (93%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/{GlobalAggregatorBuilder.java => GlobalAggregationBuilder.java} (84%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/{DateHistogramAggregatorBuilder.java => DateHistogramAggregationBuilder.java} (86%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/{HistogramAggregatorBuilder.java => HistogramAggregationBuilder.java} (90%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/{MissingAggregatorBuilder.java => MissingAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/{NestedAggregatorBuilder.java => NestedAggregationBuilder.java} (89%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/{ReverseNestedAggregatorBuilder.java => ReverseNestedAggregationBuilder.java} (87%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/{RangeAggregatorBuilder.java => RangeAggregationBuilder.java} (85%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/{DateRangeAggregatorBuilder.java => DateRangeAggregationBuilder.java} (81%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/{GeoDistanceAggregatorBuilder.java => GeoDistanceAggregationBuilder.java} (85%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/{IpRangeAggregatorBuilder.java => IpRangeAggregationBuilder.java} (90%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/{DiversifiedAggregatorBuilder.java => DiversifiedAggregationBuilder.java} (88%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/{SamplerAggregatorBuilder.java => SamplerAggregationBuilder.java} (87%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/{SignificantTermsAggregatorBuilder.java => SignificantTermsAggregationBuilder.java} (87%) rename core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/{TermsAggregatorBuilder.java => TermsAggregationBuilder.java} (89%) rename 
core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/{AvgAggregatorBuilder.java => AvgAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/{CardinalityAggregatorBuilder.java => CardinalityAggregationBuilder.java} (90%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/{GeoBoundsAggregatorBuilder.java => GeoBoundsAggregationBuilder.java} (88%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/{GeoCentroidAggregatorBuilder.java => GeoCentroidAggregationBuilder.java} (90%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/{MaxAggregatorBuilder.java => MaxAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/{MinAggregatorBuilder.java => MinAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/{PercentileRanksAggregatorBuilder.java => PercentileRanksAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/{PercentilesAggregatorBuilder.java => PercentilesAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/{ScriptedMetricAggregatorBuilder.java => ScriptedMetricAggregationBuilder.java} (93%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/{StatsAggregatorBuilder.java => StatsAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/{ExtendedStatsAggregatorBuilder.java => ExtendedStatsAggregationBuilder.java} (88%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/{SumAggregatorBuilder.java => SumAggregationBuilder.java} (91%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/{TopHitsAggregatorBuilder.java => TopHitsAggregationBuilder.java} (92%) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/{ValueCountAggregatorBuilder.java => ValueCountAggregationBuilder.java} (90%) rename core/src/main/java/org/elasticsearch/search/aggregations/support/{ValuesSourceAggregatorBuilder.java => ValuesSourceAggregationBuilder.java} (95%) diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java index 9286601da69..9490abd0b68 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -165,9 +165,9 @@ public class PercolateRequestBuilder extends ActionRequestBuilder aggregationBuilder) { + public PercolateRequestBuilder addAggregation(AggregationBuilder aggregationBuilder) { sourceBuilder().addAggregation(aggregationBuilder); return this; } diff 
--git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index a6ee99a476c..5c69d3be50b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -53,7 +53,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { private List> sorts; private Boolean trackScores; private HighlightBuilder highlightBuilder; - private List> aggregationBuilders; + private List> aggregationBuilders; private List> pipelineAggregationBuilders; /** @@ -126,7 +126,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { /** * Add an aggregation definition. */ - public PercolateSourceBuilder addAggregation(AggregatorBuilder aggregationBuilder) { + public PercolateSourceBuilder addAggregation(AggregationBuilder aggregationBuilder) { if (aggregationBuilders == null) { aggregationBuilders = new ArrayList<>(); } @@ -175,7 +175,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes { builder.field("aggregations"); builder.startObject(); if (aggregationBuilders != null) { - for (AggregatorBuilder aggregation : aggregationBuilders) { + for (AggregationBuilder aggregation : aggregationBuilders) { aggregation.toXContent(builder, params); } } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 9830f7be203..5732d43b4c7 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -373,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder aggregation) { + public SearchRequestBuilder addAggregation(AggregationBuilder aggregation) { sourceBuilder().aggregation(aggregation); return this; } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 59e9c94c27d..e76597c256c 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -98,51 +98,51 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import 
org.elasticsearch.search.action.SearchTransportService; import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.aggregations.Aggregator; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorParsers; -import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.children.InternalChildren; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramParser; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.missing.MissingParser; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.InternalRange; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.RangeParser; -import 
org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeParser; import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange; -import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser; import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange; import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser; import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler; import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser; import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; @@ -156,50 +156,50 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsParser; import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.AvgParser; import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser; import 
org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsParser; import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidParser; import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid; import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxParser; import org.elasticsearch.search.aggregations.metrics.min.InternalMin; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.min.MinParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser; import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks; import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.InternalStats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.StatsParser; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsParser; import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; +import 
org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.SumParser; import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -424,10 +424,10 @@ public class SearchModule extends AbstractModule { * @param aggregationName names by which the aggregation may be parsed. The first name is special because it is the name that the reader * is registered under. */ - public > void registerAggregation(Writeable.Reader reader, Aggregator.Parser aggregationParser, - ParseField aggregationName) { + public > void registerAggregation(Writeable.Reader reader, Aggregator.Parser aggregationParser, + ParseField aggregationName) { aggregationParserRegistry.register(aggregationParser, aggregationName); - namedWriteableRegistry.register(AggregatorBuilder.class, aggregationName.getPreferredName(), reader); + namedWriteableRegistry.register(AggregationBuilder.class, aggregationName.getPreferredName(), reader); } /** @@ -481,55 +481,57 @@ public class SearchModule extends AbstractModule { } protected void configureAggs() { - registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(MaxAggregatorBuilder::new, new MaxParser(), MaxAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(StatsAggregatorBuilder::new, new StatsParser(), StatsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ExtendedStatsAggregatorBuilder::new, new ExtendedStatsParser(), - ExtendedStatsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ValueCountAggregatorBuilder::new, new ValueCountParser(), ValueCountAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(PercentilesAggregatorBuilder::new, new PercentilesParser(), - PercentilesAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(PercentileRanksAggregatorBuilder::new, new PercentileRanksParser(), - PercentileRanksAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(CardinalityAggregatorBuilder::new, new CardinalityParser(), - CardinalityAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GlobalAggregatorBuilder::new, GlobalAggregatorBuilder::parse, GlobalAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(MissingAggregatorBuilder::new, new MissingParser(), MissingAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(FilterAggregatorBuilder::new, FilterAggregatorBuilder::parse, FilterAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(FiltersAggregatorBuilder::new, FiltersAggregatorBuilder::parse, - 
FiltersAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(SamplerAggregatorBuilder::new, SamplerAggregatorBuilder::parse, - SamplerAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(DiversifiedAggregatorBuilder::new, new DiversifiedSamplerParser(), - DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(SignificantTermsAggregatorBuilder::new, + registerAggregation(AvgAggregationBuilder::new, new AvgParser(), AvgAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(SumAggregationBuilder::new, new SumParser(), SumAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(MinAggregationBuilder::new, new MinParser(), MinAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(MaxAggregationBuilder::new, new MaxParser(), MaxAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(StatsAggregationBuilder::new, new StatsParser(), StatsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ExtendedStatsAggregationBuilder::new, new ExtendedStatsParser(), + ExtendedStatsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ValueCountAggregationBuilder::new, new ValueCountParser(), ValueCountAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(PercentilesAggregationBuilder::new, new PercentilesParser(), + PercentilesAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(PercentileRanksAggregationBuilder::new, new PercentileRanksParser(), + PercentileRanksAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(CardinalityAggregationBuilder::new, new CardinalityParser(), + CardinalityAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GlobalAggregationBuilder::new, GlobalAggregationBuilder::parse, + GlobalAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(MissingAggregationBuilder::new, new MissingParser(), MissingAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(FilterAggregationBuilder::new, FilterAggregationBuilder::parse, + FilterAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(FiltersAggregationBuilder::new, FiltersAggregationBuilder::parse, + FiltersAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(SamplerAggregationBuilder::new, SamplerAggregationBuilder::parse, + SamplerAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(DiversifiedAggregationBuilder::new, new DiversifiedSamplerParser(), + DiversifiedAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(TermsAggregationBuilder::new, new TermsParser(), TermsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(SignificantTermsAggregationBuilder::new, new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry), - SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(IpRangeAggregatorBuilder::new, new IpRangeParser(), IpRangeAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(HistogramAggregatorBuilder::new, new HistogramParser(), HistogramAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(DateHistogramAggregatorBuilder::new, new DateHistogramParser(), - 
DateHistogramAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GeoDistanceAggregatorBuilder::new, new GeoDistanceParser(), - GeoDistanceAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GeoGridAggregatorBuilder::new, new GeoHashGridParser(), GeoGridAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(NestedAggregatorBuilder::new, NestedAggregatorBuilder::parse, NestedAggregatorBuilder.AGGREGATION_FIELD_NAME); - registerAggregation(ReverseNestedAggregatorBuilder::new, ReverseNestedAggregatorBuilder::parse, - ReverseNestedAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(TopHitsAggregatorBuilder::new, TopHitsAggregatorBuilder::parse, - TopHitsAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(GeoBoundsAggregatorBuilder::new, new GeoBoundsParser(), GeoBoundsAggregatorBuilder.AGGREGATION_NAME_FIED); - registerAggregation(GeoCentroidAggregatorBuilder::new, new GeoCentroidParser(), - GeoCentroidAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ScriptedMetricAggregatorBuilder::new, ScriptedMetricAggregatorBuilder::parse, - ScriptedMetricAggregatorBuilder.AGGREGATION_NAME_FIELD); - registerAggregation(ChildrenAggregatorBuilder::new, ChildrenAggregatorBuilder::parse, - ChildrenAggregatorBuilder.AGGREGATION_NAME_FIELD); - + SignificantTermsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(RangeAggregationBuilder::new, new RangeParser(), RangeAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(DateRangeAggregationBuilder::new, new DateRangeParser(), DateRangeAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(IpRangeAggregationBuilder::new, new IpRangeParser(), IpRangeAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(HistogramAggregationBuilder::new, new HistogramParser(), HistogramAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(DateHistogramAggregationBuilder::new, new DateHistogramParser(), + DateHistogramAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GeoDistanceAggregationBuilder::new, new GeoDistanceParser(), + GeoDistanceAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GeoGridAggregationBuilder::new, new GeoHashGridParser(), GeoGridAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(NestedAggregationBuilder::new, NestedAggregationBuilder::parse, + NestedAggregationBuilder.AGGREGATION_FIELD_NAME); + registerAggregation(ReverseNestedAggregationBuilder::new, ReverseNestedAggregationBuilder::parse, + ReverseNestedAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(TopHitsAggregationBuilder::new, TopHitsAggregationBuilder::parse, + TopHitsAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(GeoBoundsAggregationBuilder::new, new GeoBoundsParser(), GeoBoundsAggregationBuilder.AGGREGATION_NAME_FIED); + registerAggregation(GeoCentroidAggregationBuilder::new, new GeoCentroidParser(), + GeoCentroidAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder::parse, + ScriptedMetricAggregationBuilder.AGGREGATION_NAME_FIELD); + registerAggregation(ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse, + ChildrenAggregationBuilder.AGGREGATION_NAME_FIELD); registerPipelineAggregation(DerivativePipelineAggregatorBuilder::new, DerivativePipelineAggregatorBuilder::parse, DerivativePipelineAggregatorBuilder.AGGREGATION_NAME_FIELD); 
registerPipelineAggregation(MaxBucketPipelineAggregatorBuilder::new, MaxBucketPipelineAggregatorBuilder.PARSER, diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java similarity index 93% rename from core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 159ab5a8a10..e0336247c75 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -36,7 +36,9 @@ import java.util.Objects; /** * A factory that knows how to create an {@link Aggregator} of a specific type. */ -public abstract class AggregatorBuilder> extends ToXContentToBytes implements NamedWriteable, ToXContent { +public abstract class AggregationBuilder> + extends ToXContentToBytes + implements NamedWriteable, ToXContent { protected String name; protected Type type; @@ -44,12 +46,12 @@ public abstract class AggregatorBuilder> extend protected Map metaData; /** - * Constructs a new aggregator factory. + * Constructs a new aggregation builder. * * @param name The aggregation name * @param type The aggregation type */ - public AggregatorBuilder(String name, Type type) { + public AggregationBuilder(String name, Type type) { if (name == null) { throw new IllegalArgumentException("[name] must not be null: [" + name + "]"); } @@ -63,7 +65,7 @@ public abstract class AggregatorBuilder> extend /** * Read from a stream. */ - protected AggregatorBuilder(StreamInput in, Type type) throws IOException { + protected AggregationBuilder(StreamInput in, Type type) throws IOException { name = in.readString(); this.type = type; factoriesBuilder = new AggregatorFactories.Builder(in); @@ -84,7 +86,7 @@ public abstract class AggregatorBuilder> extend * Add a sub aggregation to this aggregation. 
*/ @SuppressWarnings("unchecked") - public AB subAggregation(AggregatorBuilder aggregation) { + public AB subAggregation(AggregationBuilder aggregation) { if (aggregation == null) { throw new IllegalArgumentException("[aggregation] must not be null: [" + name + "]"); } @@ -178,7 +180,7 @@ public abstract class AggregatorBuilder> extend if (getClass() != obj.getClass()) return false; @SuppressWarnings("unchecked") - AggregatorBuilder other = (AggregatorBuilder) obj; + AggregationBuilder other = (AggregationBuilder) obj; if (!Objects.equals(name, other.name)) return false; if (!Objects.equals(type, other.type)) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index b76d5d17891..b1818971d6b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -22,65 +22,65 @@ import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.children.Children; -import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filters.Filters; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid; import org.elasticsearch.search.aggregations.bucket.global.Global; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.missing.Missing; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.Nested; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested; -import 
org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.Range; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.min.Min; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import 
org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; /** * Utility class to create aggregations. @@ -93,234 +93,234 @@ public class AggregationBuilders { /** * Create a new {@link ValueCount} aggregation with the given name. */ - public static ValueCountAggregatorBuilder count(String name) { - return new ValueCountAggregatorBuilder(name, null); + public static ValueCountAggregationBuilder count(String name) { + return new ValueCountAggregationBuilder(name, null); } /** * Create a new {@link Avg} aggregation with the given name. */ - public static AvgAggregatorBuilder avg(String name) { - return new AvgAggregatorBuilder(name); + public static AvgAggregationBuilder avg(String name) { + return new AvgAggregationBuilder(name); } /** * Create a new {@link Max} aggregation with the given name. */ - public static MaxAggregatorBuilder max(String name) { - return new MaxAggregatorBuilder(name); + public static MaxAggregationBuilder max(String name) { + return new MaxAggregationBuilder(name); } /** * Create a new {@link Min} aggregation with the given name. */ - public static MinAggregatorBuilder min(String name) { - return new MinAggregatorBuilder(name); + public static MinAggregationBuilder min(String name) { + return new MinAggregationBuilder(name); } /** * Create a new {@link Sum} aggregation with the given name. */ - public static SumAggregatorBuilder sum(String name) { - return new SumAggregatorBuilder(name); + public static SumAggregationBuilder sum(String name) { + return new SumAggregationBuilder(name); } /** * Create a new {@link Stats} aggregation with the given name. 
*/ - public static StatsAggregatorBuilder stats(String name) { - return new StatsAggregatorBuilder(name); + public static StatsAggregationBuilder stats(String name) { + return new StatsAggregationBuilder(name); } /** * Create a new {@link ExtendedStats} aggregation with the given name. */ - public static ExtendedStatsAggregatorBuilder extendedStats(String name) { - return new ExtendedStatsAggregatorBuilder(name); + public static ExtendedStatsAggregationBuilder extendedStats(String name) { + return new ExtendedStatsAggregationBuilder(name); } /** * Create a new {@link Filter} aggregation with the given name. */ - public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) { - return new FilterAggregatorBuilder(name, filter); + public static FilterAggregationBuilder filter(String name, QueryBuilder filter) { + return new FilterAggregationBuilder(name, filter); } /** * Create a new {@link Filters} aggregation with the given name. */ - public static FiltersAggregatorBuilder filters(String name, KeyedFilter... filters) { - return new FiltersAggregatorBuilder(name, filters); + public static FiltersAggregationBuilder filters(String name, KeyedFilter... filters) { + return new FiltersAggregationBuilder(name, filters); } /** * Create a new {@link Filters} aggregation with the given name. */ - public static FiltersAggregatorBuilder filters(String name, QueryBuilder... filters) { - return new FiltersAggregatorBuilder(name, filters); + public static FiltersAggregationBuilder filters(String name, QueryBuilder... filters) { + return new FiltersAggregationBuilder(name, filters); } /** * Create a new {@link Sampler} aggregation with the given name. */ - public static SamplerAggregatorBuilder sampler(String name) { - return new SamplerAggregatorBuilder(name); + public static SamplerAggregationBuilder sampler(String name) { + return new SamplerAggregationBuilder(name); } /** * Create a new {@link Sampler} aggregation with the given name. */ - public static DiversifiedAggregatorBuilder diversifiedSampler(String name) { - return new DiversifiedAggregatorBuilder(name); + public static DiversifiedAggregationBuilder diversifiedSampler(String name) { + return new DiversifiedAggregationBuilder(name); } /** * Create a new {@link Global} aggregation with the given name. */ - public static GlobalAggregatorBuilder global(String name) { - return new GlobalAggregatorBuilder(name); + public static GlobalAggregationBuilder global(String name) { + return new GlobalAggregationBuilder(name); } /** * Create a new {@link Missing} aggregation with the given name. */ - public static MissingAggregatorBuilder missing(String name) { - return new MissingAggregatorBuilder(name, null); + public static MissingAggregationBuilder missing(String name) { + return new MissingAggregationBuilder(name, null); } /** * Create a new {@link Nested} aggregation with the given name. */ - public static NestedAggregatorBuilder nested(String name, String path) { - return new NestedAggregatorBuilder(name, path); + public static NestedAggregationBuilder nested(String name, String path) { + return new NestedAggregationBuilder(name, path); } /** * Create a new {@link ReverseNested} aggregation with the given name. */ - public static ReverseNestedAggregatorBuilder reverseNested(String name) { - return new ReverseNestedAggregatorBuilder(name); + public static ReverseNestedAggregationBuilder reverseNested(String name) { + return new ReverseNestedAggregationBuilder(name); } /** * Create a new {@link Children} aggregation with the given name. 
*/ - public static ChildrenAggregatorBuilder children(String name, String childType) { - return new ChildrenAggregatorBuilder(name, childType); + public static ChildrenAggregationBuilder children(String name, String childType) { + return new ChildrenAggregationBuilder(name, childType); } /** * Create a new {@link GeoDistance} aggregation with the given name. */ - public static GeoDistanceAggregatorBuilder geoDistance(String name, GeoPoint origin) { - return new GeoDistanceAggregatorBuilder(name, origin); + public static GeoDistanceAggregationBuilder geoDistance(String name, GeoPoint origin) { + return new GeoDistanceAggregationBuilder(name, origin); } /** * Create a new {@link Histogram} aggregation with the given name. */ - public static HistogramAggregatorBuilder histogram(String name) { - return new HistogramAggregatorBuilder(name); + public static HistogramAggregationBuilder histogram(String name) { + return new HistogramAggregationBuilder(name); } /** * Create a new {@link GeoHashGrid} aggregation with the given name. */ - public static GeoGridAggregatorBuilder geohashGrid(String name) { - return new GeoGridAggregatorBuilder(name); + public static GeoGridAggregationBuilder geohashGrid(String name) { + return new GeoGridAggregationBuilder(name); } /** * Create a new {@link SignificantTerms} aggregation with the given name. */ - public static SignificantTermsAggregatorBuilder significantTerms(String name) { - return new SignificantTermsAggregatorBuilder(name, null); + public static SignificantTermsAggregationBuilder significantTerms(String name) { + return new SignificantTermsAggregationBuilder(name, null); } /** - * Create a new {@link DateHistogramAggregatorBuilder} aggregation with the given + * Create a new {@link DateHistogramAggregationBuilder} aggregation with the given * name. */ - public static DateHistogramAggregatorBuilder dateHistogram(String name) { - return new DateHistogramAggregatorBuilder(name); + public static DateHistogramAggregationBuilder dateHistogram(String name) { + return new DateHistogramAggregationBuilder(name); } /** * Create a new {@link Range} aggregation with the given name. */ - public static RangeAggregatorBuilder range(String name) { - return new RangeAggregatorBuilder(name); + public static RangeAggregationBuilder range(String name) { + return new RangeAggregationBuilder(name); } /** - * Create a new {@link DateRangeAggregatorBuilder} aggregation with the + * Create a new {@link DateRangeAggregationBuilder} aggregation with the * given name. */ - public static DateRangeAggregatorBuilder dateRange(String name) { - return new DateRangeAggregatorBuilder(name); + public static DateRangeAggregationBuilder dateRange(String name) { + return new DateRangeAggregationBuilder(name); } /** - * Create a new {@link IpRangeAggregatorBuilder} aggregation with the + * Create a new {@link IpRangeAggregationBuilder} aggregation with the * given name. */ - public static IpRangeAggregatorBuilder ipRange(String name) { - return new IpRangeAggregatorBuilder(name); + public static IpRangeAggregationBuilder ipRange(String name) { + return new IpRangeAggregationBuilder(name); } /** * Create a new {@link Terms} aggregation with the given name. */ - public static TermsAggregatorBuilder terms(String name) { - return new TermsAggregatorBuilder(name, null); + public static TermsAggregationBuilder terms(String name) { + return new TermsAggregationBuilder(name, null); } /** * Create a new {@link Percentiles} aggregation with the given name. 
*/ - public static PercentilesAggregatorBuilder percentiles(String name) { - return new PercentilesAggregatorBuilder(name); + public static PercentilesAggregationBuilder percentiles(String name) { + return new PercentilesAggregationBuilder(name); } /** * Create a new {@link PercentileRanks} aggregation with the given name. */ - public static PercentileRanksAggregatorBuilder percentileRanks(String name) { - return new PercentileRanksAggregatorBuilder(name); + public static PercentileRanksAggregationBuilder percentileRanks(String name) { + return new PercentileRanksAggregationBuilder(name); } /** * Create a new {@link Cardinality} aggregation with the given name. */ - public static CardinalityAggregatorBuilder cardinality(String name) { - return new CardinalityAggregatorBuilder(name, null); + public static CardinalityAggregationBuilder cardinality(String name) { + return new CardinalityAggregationBuilder(name, null); } /** * Create a new {@link TopHits} aggregation with the given name. */ - public static TopHitsAggregatorBuilder topHits(String name) { - return new TopHitsAggregatorBuilder(name); + public static TopHitsAggregationBuilder topHits(String name) { + return new TopHitsAggregationBuilder(name); } /** * Create a new {@link GeoBounds} aggregation with the given name. */ - public static GeoBoundsAggregatorBuilder geoBounds(String name) { - return new GeoBoundsAggregatorBuilder(name); + public static GeoBoundsAggregationBuilder geoBounds(String name) { + return new GeoBoundsAggregationBuilder(name); } /** * Create a new {@link GeoCentroid} aggregation with the given name. */ - public static GeoCentroidAggregatorBuilder geoCentroid(String name) { - return new GeoCentroidAggregatorBuilder(name); + public static GeoCentroidAggregationBuilder geoCentroid(String name) { + return new GeoCentroidAggregationBuilder(name); } /** * Create a new {@link ScriptedMetric} aggregation with the given name. */ - public static ScriptedMetricAggregatorBuilder scriptedMetric(String name) { - return new ScriptedMetricAggregatorBuilder(name); + public static ScriptedMetricAggregationBuilder scriptedMetric(String name) { + return new ScriptedMetricAggregationBuilder(name); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java index 426f148e38e..faceada6415 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java @@ -42,7 +42,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable { /** * Parses the aggregation request and creates the appropriate aggregator factory for it. 
* - * @see AggregatorBuilder + * @see AggregationBuilder */ @FunctionalInterface public interface Parser { @@ -55,7 +55,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable { * @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped * @throws java.io.IOException When parsing fails */ - AggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException; + AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException; } /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index f1236cd5cce..4e07ffcc4d8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -124,7 +124,7 @@ public class AggregatorFactories { public static class Builder extends ToXContentToBytes implements Writeable { private final Set names = new HashSet<>(); - private final List> aggregatorBuilders = new ArrayList<>(); + private final List> aggregationBuilders = new ArrayList<>(); private final List> pipelineAggregatorBuilders = new ArrayList<>(); private boolean skipResolveOrder; @@ -140,7 +140,7 @@ public class AggregatorFactories { public Builder(StreamInput in) throws IOException { int factoriesSize = in.readVInt(); for (int i = 0; i < factoriesSize; i++) { - addAggregator(in.readNamedWriteable(AggregatorBuilder.class)); + addAggregator(in.readNamedWriteable(AggregationBuilder.class)); } int pipelineFactoriesSize = in.readVInt(); for (int i = 0; i < pipelineFactoriesSize; i++) { @@ -150,8 +150,8 @@ public class AggregatorFactories { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(this.aggregatorBuilders.size()); - for (AggregatorBuilder factory : aggregatorBuilders) { + out.writeVInt(this.aggregationBuilders.size()); + for (AggregationBuilder factory : aggregationBuilders) { out.writeNamedWriteable(factory); } out.writeVInt(this.pipelineAggregatorBuilders.size()); @@ -164,11 +164,11 @@ public class AggregatorFactories { throw new UnsupportedOperationException("This needs to be removed"); } - public Builder addAggregator(AggregatorBuilder factory) { + public Builder addAggregator(AggregationBuilder factory) { if (!names.add(factory.name)) { throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]"); } - aggregatorBuilders.add(factory); + aggregationBuilders.add(factory); return this; } @@ -186,30 +186,30 @@ public class AggregatorFactories { } public AggregatorFactories build(AggregationContext context, AggregatorFactory parent) throws IOException { - if (aggregatorBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) { + if (aggregationBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) { return EMPTY; } List> orderedpipelineAggregators = null; if (skipResolveOrder) { orderedpipelineAggregators = new ArrayList<>(pipelineAggregatorBuilders); } else { - orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregatorBuilders); + orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregationBuilders); } - AggregatorFactory[] aggFactories = new AggregatorFactory[aggregatorBuilders.size()]; - for (int i = 0; i < aggregatorBuilders.size(); i++) { - 
aggFactories[i] = aggregatorBuilders.get(i).build(context, parent); + AggregatorFactory[] aggFactories = new AggregatorFactory[aggregationBuilders.size()]; + for (int i = 0; i < aggregationBuilders.size(); i++) { + aggFactories[i] = aggregationBuilders.get(i).build(context, parent); } return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators); } private List> resolvePipelineAggregatorOrder( - List> pipelineAggregatorBuilders, List> aggBuilders) { + List> pipelineAggregatorBuilders, List> aggBuilders) { Map> pipelineAggregatorBuildersMap = new HashMap<>(); for (PipelineAggregatorBuilder builder : pipelineAggregatorBuilders) { pipelineAggregatorBuildersMap.put(builder.getName(), builder); } - Map> aggBuildersMap = new HashMap<>(); - for (AggregatorBuilder aggBuilder : aggBuilders) { + Map> aggBuildersMap = new HashMap<>(); + for (AggregationBuilder aggBuilder : aggBuilders) { aggBuildersMap.put(aggBuilder.name, aggBuilder); } List> orderedPipelineAggregatorrs = new LinkedList<>(); @@ -223,7 +223,7 @@ public class AggregatorFactories { return orderedPipelineAggregatorrs; } - private void resolvePipelineAggregatorOrder(Map> aggBuildersMap, + private void resolvePipelineAggregatorOrder(Map> aggBuildersMap, Map> pipelineAggregatorBuildersMap, List> orderedPipelineAggregators, List> unmarkedBuilders, Set> temporarilyMarked, PipelineAggregatorBuilder builder) { @@ -238,7 +238,7 @@ public class AggregatorFactories { if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) { continue; } else if (aggBuildersMap.containsKey(firstAggName)) { - AggregatorBuilder aggBuilder = aggBuildersMap.get(firstAggName); + AggregationBuilder aggBuilder = aggBuildersMap.get(firstAggName); for (int i = 1; i < bucketsPathElements.size(); i++) { PathElement pathElement = bucketsPathElements.get(i); String aggName = pathElement.name; @@ -247,9 +247,9 @@ public class AggregatorFactories { } else { // Check the non-pipeline sub-aggregator // factories - AggregatorBuilder[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories(); + AggregationBuilder[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories(); boolean foundSubBuilder = false; - for (AggregatorBuilder subBuilder : subBuilders) { + for (AggregationBuilder subBuilder : subBuilders) { if (aggName.equals(subBuilder.name)) { aggBuilder = subBuilder; foundSubBuilder = true; @@ -289,8 +289,8 @@ public class AggregatorFactories { } } - AggregatorBuilder[] getAggregatorFactories() { - return this.aggregatorBuilders.toArray(new AggregatorBuilder[this.aggregatorBuilders.size()]); + AggregationBuilder[] getAggregatorFactories() { + return this.aggregationBuilders.toArray(new AggregationBuilder[this.aggregationBuilders.size()]); } List> getPipelineAggregatorFactories() { @@ -298,14 +298,14 @@ public class AggregatorFactories { } public int count() { - return aggregatorBuilders.size() + pipelineAggregatorBuilders.size(); + return aggregationBuilders.size() + pipelineAggregatorBuilders.size(); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - if (aggregatorBuilders != null) { - for (AggregatorBuilder subAgg : aggregatorBuilders) { + if (aggregationBuilders != null) { + for (AggregationBuilder subAgg : aggregationBuilders) { subAgg.toXContent(builder, params); } } @@ -320,7 +320,7 @@ public class AggregatorFactories { @Override public int hashCode() { - return Objects.hash(aggregatorBuilders, pipelineAggregatorBuilders); + return 
Objects.hash(aggregationBuilders, pipelineAggregatorBuilders); } @Override @@ -330,7 +330,7 @@ public class AggregatorFactories { if (getClass() != obj.getClass()) return false; Builder other = (Builder) obj; - if (!Objects.equals(aggregatorBuilders, other.aggregatorBuilders)) + if (!Objects.equals(aggregationBuilders, other.aggregationBuilders)) return false; if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders)) return false; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java index 2fd1f63d620..55345d6e5ec 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java @@ -104,7 +104,7 @@ public class AggregatorParsers { + token + "], expected a [" + XContentParser.Token.START_OBJECT + "]."); } - AggregatorBuilder aggFactory = null; + AggregationBuilder aggFactory = null; PipelineAggregatorBuilder pipelineAggregatorFactory = null; AggregatorFactories.Builder subFactories = null; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java similarity index 92% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java index c5982d1000e..3749d2b2edd 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenAggregationBuilder.java @@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.ParentChild; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -44,7 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalChildren.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -59,7 +59,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class FilterAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalFilter.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -51,7 +50,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder { +public class FiltersAggregationBuilder extends AggregationBuilder { public static final String NAME = 
InternalFilters.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -62,11 +62,11 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder filters) { + private FiltersAggregationBuilder(String name, List filters) { super(name, InternalFilters.TYPE); // internally we want to have a fixed order of filters, regardless of the order of the filters in the request this.filters = new ArrayList<>(filters); @@ -80,7 +80,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder keyedFilters = new ArrayList<>(filters.length); for (int i = 0; i < filters.length; i++) { @@ -93,7 +93,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder { +public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalGeoHashGrid.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -53,14 +53,14 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder pipelineAggregators, Map metaData) throws IOException { super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java index ba4f84017c1..1b2c4c26372 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java @@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.NonCollectingAggregator; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder.CellIdSource; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java index 2ecf4953e78..1ae31e09ba0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java @@ -45,10 +45,10 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser { } @Override - protected GeoGridAggregatorBuilder createFactory( + protected GeoGridAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(aggregationName); + GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(aggregationName); Integer precision = (Integer) otherOptions.get(GeoHashGridParams.FIELD_PRECISION); if (precision != null) { factory.precision(precision); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorBuilder.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java similarity index 84% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java index 7a60dcdab93..0f7e0713598 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregationBuilder.java @@ -24,25 +24,25 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; -public class GlobalAggregatorBuilder extends AggregatorBuilder { +public class GlobalAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalGlobal.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public GlobalAggregatorBuilder(String name) { + public GlobalAggregationBuilder(String name) { super(name, InternalGlobal.TYPE); } /** * Read from a stream. */ - public GlobalAggregatorBuilder(StreamInput in) throws IOException { + public GlobalAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGlobal.TYPE); } @@ -64,9 +64,9 @@ public class GlobalAggregatorBuilder extends AggregatorBuilder> - extends ValuesSourceAggregatorBuilder { + extends ValuesSourceAggregationBuilder { protected long interval; protected long offset = 0; @@ -200,4 +200,4 @@ public abstract class AbstractHistogramBuilder { +public class DateHistogramAggregationBuilder extends AbstractHistogramBuilder { public static final String NAME = InternalDateHistogram.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private DateHistogramInterval dateHistogramInterval; - public DateHistogramAggregatorBuilder(String name) { + public DateHistogramAggregationBuilder(String name) { super(name, InternalDateHistogram.HISTOGRAM_FACTORY); } /** * Read from a stream. 
*/ - public DateHistogramAggregatorBuilder(StreamInput in) throws IOException { + public DateHistogramAggregationBuilder(StreamInput in) throws IOException { super(in, InternalDateHistogram.HISTOGRAM_FACTORY); dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); } @@ -61,7 +61,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder otherOptions) { - DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder(aggregationName); + protected DateHistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(aggregationName); Object interval = otherOptions.get(Rounding.Interval.INTERVAL_FIELD); if (interval == null) { throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]"); @@ -89,6 +89,6 @@ public class DateHistogramParser extends HistogramParser { @Override protected long parseStringOffset(String offset) throws IOException { - return DateHistogramAggregatorBuilder.parseStringOffset(offset); + return DateHistogramAggregationBuilder.parseStringOffset(offset); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java index 49bbd7160cf..54d52466bbb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java @@ -29,18 +29,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import java.io.IOException; -public class HistogramAggregatorBuilder extends AbstractHistogramBuilder { +public class HistogramAggregationBuilder extends AbstractHistogramBuilder { public static final String NAME = InternalHistogram.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public HistogramAggregatorBuilder(String name) { + public HistogramAggregationBuilder(String name) { super(name, InternalHistogram.HISTOGRAM_FACTORY); } /** * Read from a stream. 
*/ - public HistogramAggregatorBuilder(StreamInput in) throws IOException { + public HistogramAggregationBuilder(StreamInput in) throws IOException { super(in, InternalHistogram.HISTOGRAM_FACTORY); } @@ -55,4 +55,4 @@ public class HistogramAggregatorBuilder extends AbstractHistogramBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder(aggregationName); + HistogramAggregationBuilder factory = new HistogramAggregationBuilder(aggregationName); Long interval = (Long) otherOptions.get(Rounding.Interval.INTERVAL_FIELD); if (interval == null) { throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]"); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java index 34263980bf4..f10f7683841 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregationBuilder.java @@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalMissing.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public MissingAggregatorBuilder(String name, ValueType targetValueType) { + public MissingAggregationBuilder(String name, ValueType targetValueType) { super(name, InternalMissing.TYPE, ValuesSourceType.ANY, targetValueType); } /** * Read from a stream. 
*/ - public MissingAggregatorBuilder(StreamInput in) throws IOException { + public MissingAggregationBuilder(StreamInput in) throws IOException { super(in, InternalMissing.TYPE, ValuesSourceType.ANY); } @@ -85,4 +85,4 @@ public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder otherOptions) { - return new MissingAggregatorBuilder(aggregationName, targetValueType); + protected MissingAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new MissingAggregationBuilder(aggregationName, targetValueType); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java similarity index 89% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java index f01a78e9211..33771910f16 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregationBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; import java.util.Objects; -public class NestedAggregatorBuilder extends AggregatorBuilder { +public class NestedAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalNested.TYPE.name(); public static final ParseField AGGREGATION_FIELD_NAME = new ParseField(NAME); @@ -47,7 +47,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder { +public class ReverseNestedAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalReverseNested.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private String path; - public ReverseNestedAggregatorBuilder(String name) { + public ReverseNestedAggregationBuilder(String name) { super(name, InternalReverseNested.TYPE); } /** * Read from a stream. 
*/ - public ReverseNestedAggregatorBuilder(StreamInput in) throws IOException { + public ReverseNestedAggregationBuilder(StreamInput in) throws IOException { super(in, InternalReverseNested.TYPE); path = in.readOptionalString(); } @@ -62,7 +62,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder, R extends Range> - extends ValuesSourceAggregatorBuilder { + extends ValuesSourceAggregationBuilder { protected final InternalRange.Factory rangeFactory; protected List ranges = new ArrayList<>(); @@ -103,4 +103,4 @@ public abstract class AbstractRangeBuilder { +public class RangeAggregationBuilder extends AbstractRangeBuilder { public static final String NAME = InternalRange.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public RangeAggregatorBuilder(String name) { + public RangeAggregationBuilder(String name) { super(name, InternalRange.FACTORY); } /** * Read from a stream. */ - public RangeAggregatorBuilder(StreamInput in) throws IOException { + public RangeAggregationBuilder(StreamInput in) throws IOException { super(in, InternalRange.FACTORY, Range::new); } @@ -55,7 +55,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilderfrom and * to. */ - public RangeAggregatorBuilder addRange(double from, double to) { + public RangeAggregationBuilder addRange(double from, double to) { return addRange(null, from, to); } @@ -77,7 +77,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - RangeAggregatorBuilder factory = new RangeAggregatorBuilder(aggregationName); + RangeAggregationBuilder factory = new RangeAggregationBuilder(aggregationName); @SuppressWarnings("unchecked") List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); for (Range range : ranges) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregationBuilder.java similarity index 81% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregationBuilder.java index 9c28461df6c..392744a4f1c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregationBuilder.java @@ -33,18 +33,18 @@ import org.joda.time.DateTime; import java.io.IOException; -public class DateRangeAggregatorBuilder extends AbstractRangeBuilder { +public class DateRangeAggregationBuilder extends AbstractRangeBuilder { public static final String NAME = InternalDateRange.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public DateRangeAggregatorBuilder(String name) { + public DateRangeAggregationBuilder(String name) { super(name, InternalDateRange.FACTORY); } /** * Read from a stream. */ - public DateRangeAggregatorBuilder(StreamInput in) throws IOException { + public DateRangeAggregationBuilder(StreamInput in) throws IOException { super(in, InternalDateRange.FACTORY, Range::new); } @@ -63,7 +63,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilderfrom and to. 
*/ - public DateRangeAggregatorBuilder addRange(String from, String to) { + public DateRangeAggregationBuilder addRange(String from, String to) { return addRange(null, from, to); } @@ -84,7 +84,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilderfrom and to. */ - public DateRangeAggregatorBuilder addRange(double from, double to) { + public DateRangeAggregationBuilder addRange(double from, double to) { return addRange(null, from, to); } @@ -149,7 +149,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilderfrom and to. */ - public DateRangeAggregatorBuilder addRange(DateTime from, DateTime to) { + public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) { return addRange(null, from, to); } @@ -222,7 +222,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder otherOptions) { - DateRangeAggregatorBuilder factory = new DateRangeAggregatorBuilder(aggregationName); + protected DateRangeAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(aggregationName); @SuppressWarnings("unchecked") List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); for (Range range : ranges) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregationBuilder.java similarity index 85% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregationBuilder.java index a72b4fd322e..e82a769431a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceAggregationBuilder.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; @@ -42,7 +42,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; -public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalGeoDistance.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -52,12 +52,12 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< private GeoDistance distanceType = GeoDistance.DEFAULT; private boolean keyed = false; - public GeoDistanceAggregatorBuilder(String name, GeoPoint origin) { + public GeoDistanceAggregationBuilder(String name, GeoPoint origin) { this(name, origin, 
InternalGeoDistance.FACTORY); } - private GeoDistanceAggregatorBuilder(String name, GeoPoint origin, - InternalRange.Factory rangeFactory) { + private GeoDistanceAggregationBuilder(String name, GeoPoint origin, + InternalRange.Factory rangeFactory) { super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType()); if (origin == null) { throw new IllegalArgumentException("[origin] must not be null: [" + name + "]"); @@ -68,7 +68,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Read from a stream. */ - public GeoDistanceAggregatorBuilder(StreamInput in) throws IOException { + public GeoDistanceAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGeoDistance.FACTORY.type(), InternalGeoDistance.FACTORY.getValueSourceType(), InternalGeoDistance.FACTORY.getValueType()); origin = new GeoPoint(in.readDouble(), in.readDouble()); @@ -95,7 +95,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< unit.writeTo(out); } - public GeoDistanceAggregatorBuilder addRange(Range range) { + public GeoDistanceAggregationBuilder addRange(Range range) { if (range == null) { throw new IllegalArgumentException("[range] must not be null: [" + name + "]"); } @@ -113,7 +113,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * @param to * the upper bound on the distances, exclusive */ - public GeoDistanceAggregatorBuilder addRange(String key, double from, double to) { + public GeoDistanceAggregationBuilder addRange(String key, double from, double to) { ranges.add(new Range(key, from, to)); return this; } @@ -123,7 +123,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * automatically generated based on from and * to. */ - public GeoDistanceAggregatorBuilder addRange(double from, double to) { + public GeoDistanceAggregationBuilder addRange(double from, double to) { return addRange(null, from, to); } @@ -135,7 +135,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * @param to * the upper bound on the distances, exclusive */ - public GeoDistanceAggregatorBuilder addUnboundedTo(String key, double to) { + public GeoDistanceAggregationBuilder addUnboundedTo(String key, double to) { ranges.add(new Range(key, null, to)); return this; } @@ -144,7 +144,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * Same as {@link #addUnboundedTo(String, double)} but the key will be * computed automatically. */ - public GeoDistanceAggregatorBuilder addUnboundedTo(double to) { + public GeoDistanceAggregationBuilder addUnboundedTo(double to) { return addUnboundedTo(null, to); } @@ -156,7 +156,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * @param from * the lower bound on the distances, inclusive */ - public GeoDistanceAggregatorBuilder addUnboundedFrom(String key, double from) { + public GeoDistanceAggregationBuilder addUnboundedFrom(String key, double from) { addRange(new Range(key, from, null)); return this; } @@ -165,7 +165,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< * Same as {@link #addUnboundedFrom(String, double)} but the key will be * computed automatically. 
*/ - public GeoDistanceAggregatorBuilder addUnboundedFrom(double from) { + public GeoDistanceAggregationBuilder addUnboundedFrom(double from) { return addUnboundedFrom(null, from); } @@ -178,7 +178,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< return NAME; } - public GeoDistanceAggregatorBuilder unit(DistanceUnit unit) { + public GeoDistanceAggregationBuilder unit(DistanceUnit unit) { if (unit == null) { throw new IllegalArgumentException("[unit] must not be null: [" + name + "]"); } @@ -190,7 +190,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< return unit; } - public GeoDistanceAggregatorBuilder distanceType(GeoDistance distanceType) { + public GeoDistanceAggregationBuilder distanceType(GeoDistance distanceType) { if (distanceType == null) { throw new IllegalArgumentException("[distanceType] must not be null: [" + name + "]"); } @@ -202,7 +202,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< return distanceType; } - public GeoDistanceAggregatorBuilder keyed(boolean keyed) { + public GeoDistanceAggregationBuilder keyed(boolean keyed) { this.keyed = keyed; return this; } @@ -236,7 +236,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< @Override protected boolean innerEquals(Object obj) { - GeoDistanceAggregatorBuilder other = (GeoDistanceAggregatorBuilder) obj; + GeoDistanceAggregationBuilder other = (GeoDistanceAggregationBuilder) obj; return Objects.equals(origin, other.origin) && Objects.equals(ranges, other.ranges) && Objects.equals(keyed, other.keyed) @@ -244,4 +244,4 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder< && Objects.equals(unit, other.unit); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java index ed6d6a67e2a..b98757aae5d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java @@ -85,10 +85,10 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { } @Override - protected GeoDistanceAggregatorBuilder createFactory( + protected GeoDistanceAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD); - GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder(aggregationName, origin); + GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder(aggregationName, origin); @SuppressWarnings("unchecked") List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); for (Range range : ranges) { @@ -171,4 +171,4 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { } return false; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java rename to 
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregationBuilder.java index c56a2952f8d..243db5f75e3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregationBuilder.java @@ -44,14 +44,14 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -public final class IpRangeAggregatorBuilder - extends ValuesSourceAggregatorBuilder { +public final class IpRangeAggregationBuilder + extends ValuesSourceAggregationBuilder { private static final String NAME = "ip_range"; public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME); @@ -163,7 +163,7 @@ public final class IpRangeAggregatorBuilder private boolean keyed = false; private List ranges = new ArrayList<>(); - public IpRangeAggregatorBuilder(String name) { + public IpRangeAggregationBuilder(String name) { super(name, TYPE, ValuesSourceType.BYTES, ValueType.IP); } @@ -172,7 +172,7 @@ public final class IpRangeAggregatorBuilder return NAME; } - public IpRangeAggregatorBuilder keyed(boolean keyed) { + public IpRangeAggregationBuilder keyed(boolean keyed) { this.keyed = keyed; return this; } @@ -187,7 +187,7 @@ public final class IpRangeAggregatorBuilder } /** Add a new {@link Range} to this aggregation. */ - public IpRangeAggregatorBuilder addRange(Range range) { + public IpRangeAggregationBuilder addRange(Range range) { ranges.add(range); return this; } @@ -202,7 +202,7 @@ public final class IpRangeAggregatorBuilder * @param to * the upper bound on the distances, exclusive */ - public IpRangeAggregatorBuilder addRange(String key, String from, String to) { + public IpRangeAggregationBuilder addRange(String key, String from, String to) { addRange(new Range(key, from, to)); return this; } @@ -210,7 +210,7 @@ public final class IpRangeAggregatorBuilder /** * Add a new range to this aggregation using the CIDR notation. */ - public IpRangeAggregatorBuilder addMaskRange(String key, String mask) { + public IpRangeAggregationBuilder addMaskRange(String key, String mask) { return addRange(new Range(key, mask)); } @@ -218,7 +218,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addMaskRange(String, String)} but uses the mask itself as * a key. */ - public IpRangeAggregatorBuilder addMaskRange(String mask) { + public IpRangeAggregationBuilder addMaskRange(String mask) { return addRange(new Range(mask, mask)); } @@ -226,7 +226,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addRange(String, String, String)} but the key will be * automatically generated. 
*/ - public IpRangeAggregatorBuilder addRange(String from, String to) { + public IpRangeAggregationBuilder addRange(String from, String to) { return addRange(null, from, to); } @@ -234,7 +234,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addRange(String, String, String)} but there will be no * lower bound. */ - public IpRangeAggregatorBuilder addUnboundedTo(String key, String to) { + public IpRangeAggregationBuilder addUnboundedTo(String key, String to) { addRange(new Range(key, null, to)); return this; } @@ -243,7 +243,7 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addUnboundedTo(String, String)} but the key will be * generated automatically. */ - public IpRangeAggregatorBuilder addUnboundedTo(String to) { + public IpRangeAggregationBuilder addUnboundedTo(String to) { return addUnboundedTo(null, to); } @@ -251,13 +251,13 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addRange(String, String, String)} but there will be no * upper bound. */ - public IpRangeAggregatorBuilder addUnboundedFrom(String key, String from) { + public IpRangeAggregationBuilder addUnboundedFrom(String key, String from) { addRange(new Range(key, from, null)); return this; } @Override - public IpRangeAggregatorBuilder script(Script script) { + public IpRangeAggregationBuilder script(Script script) { throw new IllegalArgumentException("[ip_range] does not support scripts"); } @@ -265,11 +265,11 @@ public final class IpRangeAggregatorBuilder * Same as {@link #addUnboundedFrom(String, String)} but the key will be * generated automatically. */ - public IpRangeAggregatorBuilder addUnboundedFrom(String from) { + public IpRangeAggregationBuilder addUnboundedFrom(String from) { return addUnboundedFrom(null, from); } - public IpRangeAggregatorBuilder(StreamInput in) throws IOException { + public IpRangeAggregationBuilder(StreamInput in) throws IOException { super(in, TYPE, ValuesSourceType.BYTES, ValueType.IP); final int numRanges = in.readVInt(); for (int i = 0; i < numRanges; ++i) { @@ -323,7 +323,7 @@ public final class IpRangeAggregatorBuilder @Override protected boolean innerEquals(Object obj) { - IpRangeAggregatorBuilder that = (IpRangeAggregatorBuilder) obj; + IpRangeAggregationBuilder that = (IpRangeAggregationBuilder) obj; return keyed == that.keyed && ranges.equals(that.ranges); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java index 64ed77d42f3..8445fb2d459 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java @@ -30,10 +30,10 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder.Range; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder.Range; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import 
org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; /** @@ -48,10 +48,10 @@ public class IpRangeParser extends BytesValuesSourceParser { } @Override - protected ValuesSourceAggregatorBuilder createFactory( + protected ValuesSourceAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - IpRangeAggregatorBuilder range = new IpRangeAggregatorBuilder(aggregationName); + IpRangeAggregationBuilder range = new IpRangeAggregationBuilder(aggregationName); @SuppressWarnings("unchecked") Iterable ranges = (Iterable) otherOptions.get(RangeAggregator.RANGES_FIELD); if (otherOptions.containsKey(RangeAggregator.RANGES_FIELD)) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java similarity index 88% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java index f0c923e6efc..804574eea10 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -36,25 +36,25 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "diversified_sampler"; public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); public static final Type TYPE = new Type(NAME); public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1; - private int shardSize = SamplerAggregatorBuilder.DEFAULT_SHARD_SAMPLE_SIZE; + private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE; private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT; private String executionHint = null; - public DiversifiedAggregatorBuilder(String name) { + public DiversifiedAggregationBuilder(String name) { super(name, TYPE, ValuesSourceType.ANY, null); } /** * Read from a stream. 
*/ - public DiversifiedAggregatorBuilder(StreamInput in) throws IOException { + public DiversifiedAggregationBuilder(StreamInput in) throws IOException { super(in, TYPE, ValuesSourceType.ANY, null); shardSize = in.readVInt(); maxDocsPerValue = in.readVInt(); @@ -71,7 +71,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Set the max num docs to be returned from each shard. */ - public DiversifiedAggregatorBuilder shardSize(int shardSize) { + public DiversifiedAggregationBuilder shardSize(int shardSize) { if (shardSize < 0) { throw new IllegalArgumentException( "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]"); @@ -90,7 +90,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Set the max num docs to be returned per value. */ - public DiversifiedAggregatorBuilder maxDocsPerValue(int maxDocsPerValue) { + public DiversifiedAggregationBuilder maxDocsPerValue(int maxDocsPerValue) { if (maxDocsPerValue < 0) { throw new IllegalArgumentException( "[maxDocsPerValue] must be greater than or equal to 0. Found [" + maxDocsPerValue + "] in [" + name + "]"); @@ -109,7 +109,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< /** * Set the execution hint. */ - public DiversifiedAggregatorBuilder executionHint(String executionHint) { + public DiversifiedAggregationBuilder executionHint(String executionHint) { this.executionHint = executionHint; return this; } @@ -145,7 +145,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< @Override protected boolean innerEquals(Object obj) { - DiversifiedAggregatorBuilder other = (DiversifiedAggregatorBuilder) obj; + DiversifiedAggregationBuilder other = (DiversifiedAggregationBuilder) obj; return Objects.equals(shardSize, other.shardSize) && Objects.equals(maxDocsPerValue, other.maxDocsPerValue) && Objects.equals(executionHint, other.executionHint); @@ -155,4 +155,4 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder< public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java index cb87e53f2c0..f495071f6d2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java @@ -38,9 +38,9 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser { } @Override - protected DiversifiedAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - DiversifiedAggregatorBuilder factory = new DiversifiedAggregatorBuilder(aggregationName); + protected DiversifiedAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder(aggregationName); Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD); if (shardSize != null) { factory.shardSize(shardSize); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorBuilder.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java similarity index 87% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java index 2cc3bb4c303..1220a2ddd42 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; import java.util.Objects; -public class SamplerAggregatorBuilder extends AggregatorBuilder { +public class SamplerAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalSampler.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -42,14 +42,14 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder { +public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = SignificantStringTerms.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -62,14 +62,14 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS); private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC; - public SignificantTermsAggregatorBuilder(String name, ValueType valueType) { + public SignificantTermsAggregationBuilder(String name, ValueType valueType) { super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType); } /** * Read from a Stream. 
*/ - public SignificantTermsAggregatorBuilder(StreamInput in) throws IOException { + public SignificantTermsAggregationBuilder(StreamInput in) throws IOException { super(in, SignificantStringTerms.TYPE, ValuesSourceType.ANY); bucketCountThresholds = new BucketCountThresholds(in); executionHint = in.readOptionalString(); @@ -100,7 +100,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return bucketCountThresholds; } - public SignificantTermsAggregatorBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { + public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { if (bucketCountThresholds == null) { throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]"); } @@ -112,7 +112,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * Sets the size - indicating how many term buckets should be returned * (defaults to 10) */ - public SignificantTermsAggregatorBuilder size(int size) { + public SignificantTermsAggregationBuilder size(int size) { if (size < 0) { throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]"); } @@ -126,7 +126,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * search execution). The higher the shard size is, the more accurate the * results are. */ - public SignificantTermsAggregatorBuilder shardSize(int shardSize) { + public SignificantTermsAggregationBuilder shardSize(int shardSize) { if (shardSize < 0) { throw new IllegalArgumentException( "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]"); @@ -139,7 +139,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * Set the minimum document count terms should have in order to appear in * the response. */ - public SignificantTermsAggregatorBuilder minDocCount(long minDocCount) { + public SignificantTermsAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]"); @@ -152,7 +152,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui * Set the minimum document count terms should have on the shard in order to * appear in the response. */ - public SignificantTermsAggregatorBuilder shardMinDocCount(long shardMinDocCount) { + public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]"); @@ -164,7 +164,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui /** * Expert: sets an execution hint to the aggregation. 
*/ - public SignificantTermsAggregatorBuilder executionHint(String executionHint) { + public SignificantTermsAggregationBuilder executionHint(String executionHint) { this.executionHint = executionHint; return this; } @@ -176,7 +176,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return executionHint; } - public SignificantTermsAggregatorBuilder backgroundFilter(QueryBuilder backgroundFilter) { + public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) { if (backgroundFilter == null) { throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]"); } @@ -191,7 +191,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui /** * Set terms to include and exclude from the aggregation results */ - public SignificantTermsAggregatorBuilder includeExclude(IncludeExclude includeExclude) { + public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) { this.includeExclude = includeExclude; return this; } @@ -203,7 +203,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui return includeExclude; } - public SignificantTermsAggregatorBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) { + public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) { if (significanceHeuristic == null) { throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]"); } @@ -226,7 +226,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { bucketCountThresholds.toXContent(builder, params); if (executionHint != null) { - builder.field(TermsAggregatorBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); + builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); } if (filterBuilder != null) { builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder); @@ -245,7 +245,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui @Override protected boolean innerEquals(Object obj) { - SignificantTermsAggregatorBuilder other = (SignificantTermsAggregatorBuilder) obj; + SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj; return Objects.equals(bucketCountThresholds, other.bucketCountThresholds) && Objects.equals(executionHint, other.executionHint) && Objects.equals(filterBuilder, other.filterBuilder) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 4b9e3acb873..ab30e1b2d4a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -178,7 +178,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac numberOfAggregatorsCreated++; BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds); - if (bucketCountThresholds.getShardSize() == SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { + if 
(bucketCountThresholds.getShardSize() == SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { // The user has not made a shardSize selection . // Use default heuristic to avoid any wrong-ranking caused by // distributed counting diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java index 60805bea692..33db8f97335 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java @@ -53,10 +53,11 @@ public class SignificantTermsParser extends AbstractTermsParser { } @Override - protected SignificantTermsAggregatorBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint, - IncludeExclude incExc, Map otherOptions) { - SignificantTermsAggregatorBuilder factory = new SignificantTermsAggregatorBuilder(aggregationName, targetValueType); + protected SignificantTermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode collectMode, String executionHint, + IncludeExclude incExc, Map otherOptions) { + SignificantTermsAggregationBuilder factory = new SignificantTermsAggregationBuilder(aggregationName, targetValueType); if (bucketCountThresholds != null) { factory.bucketCountThresholds(bucketCountThresholds); } @@ -66,11 +67,12 @@ public class SignificantTermsParser extends AbstractTermsParser { if (incExc != null) { factory.includeExclude(incExc); } - QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER); + QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregationBuilder.BACKGROUND_FILTER); if (backgroundFilter != null) { factory.backgroundFilter(backgroundFilter); } - SignificanceHeuristic significanceHeuristic = (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregatorBuilder.HEURISTIC); + SignificanceHeuristic significanceHeuristic = + (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregationBuilder.HEURISTIC); if (significanceHeuristic != null) { factory.significanceHeuristic(significanceHeuristic); } @@ -85,12 +87,12 @@ public class SignificantTermsParser extends AbstractTermsParser { .lookupReturningNullIfNotFound(currentFieldName, parseFieldMatcher); if (significanceHeuristicParser != null) { SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher); - otherOptions.put(SignificantTermsAggregatorBuilder.HEURISTIC, significanceHeuristic); + otherOptions.put(SignificantTermsAggregationBuilder.HEURISTIC, significanceHeuristic); return true; - } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregatorBuilder.BACKGROUND_FILTER)) { + } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) { QueryParseContext queryParseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); QueryBuilder filter = queryParseContext.parseInnerQueryBuilder(); - 
otherOptions.put(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER, filter); + otherOptions.put(SignificantTermsAggregationBuilder.BACKGROUND_FILTER, filter); return true; } } @@ -99,6 +101,6 @@ public class SignificantTermsParser extends AbstractTermsParser { @Override protected BucketCountThresholds getDefaultBucketCountThresholds() { - return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); + return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java index 5369e269058..b5781aa34be 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java @@ -60,7 +60,7 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms pipelineAggregators, Map metaData) { //We pass zero for index/subset sizes because for the purpose of significant term analysis // we assume an unmapped index's size is irrelevant to the proceedings. - super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregatorBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC, + super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregationBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC, BUCKETS, pipelineAggregators, metaData); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java index a15c7d28427..3f27c4f1c6f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParser.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; @@ -50,8 +50,10 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser { } @Override - protected final ValuesSourceAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { + protected final ValuesSourceAggregationBuilder createFactory(String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + Map otherOptions) { BucketCountThresholds bucketCountThresholds = getDefaultBucketCountThresholds(); Integer requiredSize = (Integer) otherOptions.get(REQUIRED_SIZE_FIELD_NAME); if (requiredSize != null && requiredSize != -1) { @@ -77,10 +79,14 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser { otherOptions); } - protected abstract ValuesSourceAggregatorBuilder 
doCreateFactory(String aggregationName, - ValuesSourceType valuesSourceType, - ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint, - IncludeExclude incExc, Map otherOptions); + protected abstract ValuesSourceAggregationBuilder doCreateFactory(String aggregationName, + ValuesSourceType valuesSourceType, + ValueType targetValueType, + BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode collectMode, + String executionHint, + IncludeExclude incExc, + Map otherOptions); @Override protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser, diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java similarity index 89% rename from core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java index 53887d8b20c..f4cb133c499 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -38,7 +38,7 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = StringTerms.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -61,14 +61,14 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder orders) { + public TermsAggregationBuilder order(List orders) { if (orders == null) { throw new IllegalArgumentException("[orders] must not be null: [" + name + "]"); } @@ -190,7 +190,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder otherOptions) { - TermsAggregatorBuilder factory = new TermsAggregatorBuilder(aggregationName, targetValueType); + protected TermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, BucketCountThresholds bucketCountThresholds, + SubAggCollectionMode collectMode, String executionHint, + IncludeExclude incExc, Map otherOptions) { + TermsAggregationBuilder factory = new TermsAggregationBuilder(aggregationName, targetValueType); @SuppressWarnings("unchecked") - List orderElements = (List) otherOptions.get(TermsAggregatorBuilder.ORDER_FIELD); + List orderElements = (List) otherOptions.get(TermsAggregationBuilder.ORDER_FIELD); if (orderElements != null) 
{ List orders = new ArrayList<>(orderElements.size()); for (OrderElement orderElement : orderElements) { @@ -66,7 +67,7 @@ public class TermsParser extends AbstractTermsParser { if (incExc != null) { factory.includeExclude(incExc); } - Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR); + Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR); if (showTermDocCountError != null) { factory.showTermDocCountError(showTermDocCountError); } @@ -77,12 +78,12 @@ public class TermsParser extends AbstractTermsParser { public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token, String currentFieldName, Map otherOptions) throws IOException { if (token == XContentParser.Token.START_OBJECT) { - if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) { - otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser))); + if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) { + otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser))); return true; } } else if (token == XContentParser.Token.START_ARRAY) { - if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) { + if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) { List orderElements = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { @@ -93,12 +94,12 @@ public class TermsParser extends AbstractTermsParser { "Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "]."); } } - otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, orderElements); + otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, orderElements); return true; } } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR)) { - otherOptions.put(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue()); + if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) { + otherOptions.put(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue()); return true; } } @@ -158,7 +159,7 @@ public class TermsParser extends AbstractTermsParser { @Override public TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds() { - return new TermsAggregator.BucketCountThresholds(TermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); + return new TermsAggregator.BucketCountThresholds(TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS); } static Terms.Order resolveOrder(String key, boolean asc) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java index da3733d13a9..ce098177a0b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalAvg.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public AvgAggregatorBuilder(String name) { + public AvgAggregationBuilder(String name) { super(name, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. */ - public AvgAggregatorBuilder(StreamInput in) throws IOException { + public AvgAggregationBuilder(StreamInput in) throws IOException { super(in, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java index edb3d8f6620..b4f9261b1eb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java @@ -44,8 +44,8 @@ public class AvgParser extends NumericValuesSourceParser { } @Override - protected AvgAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new AvgAggregatorBuilder(aggregationName); + protected AvgAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new AvgAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java index c9465cfb94d..a7850c23475 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java @@ -28,14 +28,16 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import 
org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public final class CardinalityAggregationBuilder + extends ValuesSourceAggregationBuilder.LeafOnly { + public static final String NAME = InternalCardinality.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -43,14 +45,14 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu private Long precisionThreshold = null; - public CardinalityAggregatorBuilder(String name, ValueType targetValueType) { + public CardinalityAggregationBuilder(String name, ValueType targetValueType) { super(name, InternalCardinality.TYPE, ValuesSourceType.ANY, targetValueType); } /** * Read from a stream. */ - public CardinalityAggregatorBuilder(StreamInput in) throws IOException { + public CardinalityAggregationBuilder(StreamInput in) throws IOException { super(in, InternalCardinality.TYPE, ValuesSourceType.ANY); if (in.readBoolean()) { precisionThreshold = in.readLong(); @@ -75,7 +77,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu * Set a precision threshold. Higher values improve accuracy but also * increase memory usage. */ - public CardinalityAggregatorBuilder precisionThreshold(long precisionThreshold) { + public CardinalityAggregationBuilder precisionThreshold(long precisionThreshold) { if (precisionThreshold < 0) { throw new IllegalArgumentException( "[precisionThreshold] must be greater than or equal to 0. 
Found [" + precisionThreshold + "] in [" + name + "]"); @@ -122,7 +124,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu @Override protected boolean innerEquals(Object obj) { - CardinalityAggregatorBuilder other = (CardinalityAggregatorBuilder) obj; + CardinalityAggregationBuilder other = (CardinalityAggregationBuilder) obj; return Objects.equals(precisionThreshold, other.precisionThreshold); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java index 3272d90eede..3a2e6a2072a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java @@ -40,10 +40,10 @@ public class CardinalityParser extends AnyValuesSourceParser { } @Override - protected CardinalityAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - CardinalityAggregatorBuilder factory = new CardinalityAggregatorBuilder(aggregationName, targetValueType); - Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD); + protected CardinalityAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder(aggregationName, targetValueType); + Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD); if (precisionThreshold != null) { factory.precisionThreshold(precisionThreshold); } @@ -54,8 +54,8 @@ public class CardinalityParser extends AnyValuesSourceParser { protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Map otherOptions) throws IOException { if (token.isValue()) { - if (parseFieldMatcher.match(currentFieldName, CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD)) { - otherOptions.put(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue()); + if (parseFieldMatcher.match(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) { + otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue()); return true; } else if (parseFieldMatcher.match(currentFieldName, REHASH)) { // ignore diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java similarity index 88% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java index ea4681ed686..eff020ec610 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java @@ -28,27 +28,27 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import 
org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder { +public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = InternalGeoBounds.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIED = new ParseField(NAME); private boolean wrapLongitude = true; - public GeoBoundsAggregatorBuilder(String name) { + public GeoBoundsAggregationBuilder(String name) { super(name, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); } /** * Read from a stream. */ - public GeoBoundsAggregatorBuilder(StreamInput in) throws IOException { + public GeoBoundsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); wrapLongitude = in.readBoolean(); } @@ -61,7 +61,7 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder otherOptions) { - GeoBoundsAggregatorBuilder factory = new GeoBoundsAggregatorBuilder(aggregationName); + protected GeoBoundsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(aggregationName); Boolean wrapLongitude = (Boolean) otherOptions.get(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD); if (wrapLongitude != null) { factory.wrapLongitude(wrapLongitude); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java similarity index 90% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java index ea8e54cdba9..f9bf2e0a346 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java @@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class GeoCentroidAggregatorBuilder - extends ValuesSourceAggregatorBuilder.LeafOnly { +public class GeoCentroidAggregationBuilder + extends ValuesSourceAggregationBuilder.LeafOnly { public static final 
String NAME = InternalGeoCentroid.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public GeoCentroidAggregatorBuilder(String name) { + public GeoCentroidAggregationBuilder(String name) { super(name, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); } /** * Read from a stream. */ - public GeoCentroidAggregatorBuilder(StreamInput in) throws IOException { + public GeoCentroidAggregationBuilder(StreamInput in) throws IOException { super(in, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT); } @@ -80,4 +80,4 @@ public class GeoCentroidAggregatorBuilder public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java index b056920b141..6c9e9ba67b0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java @@ -46,8 +46,8 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser { } @Override - protected GeoCentroidAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new GeoCentroidAggregatorBuilder(aggregationName); + protected GeoCentroidAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new GeoCentroidAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java index 7258e77aea5..9fa919fcf9e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalMax.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public MaxAggregatorBuilder(String name) { + public MaxAggregationBuilder(String name) { super(name, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read 
from a stream. */ - public MaxAggregatorBuilder(StreamInput in) throws IOException { + public MaxAggregationBuilder(StreamInput in) throws IOException { super(in, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java index 41c321acf33..d2ddd4daa08 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java @@ -44,8 +44,8 @@ public class MaxParser extends NumericValuesSourceParser { } @Override - protected MaxAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new MaxAggregatorBuilder(aggregationName); + protected MaxAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new MaxAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java index c51e97f0538..af4f204bddb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalMin.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public MinAggregatorBuilder(String name) { + public MinAggregationBuilder(String name) { super(name, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. 
*/ - public MinAggregatorBuilder(StreamInput in) throws IOException { + public MinAggregationBuilder(StreamInput in) throws IOException { super(in, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java index 9f9eafc5035..194c08fc49b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java @@ -45,8 +45,8 @@ public class MinParser extends NumericValuesSourceParser { } @Override - protected MinAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new MinAggregatorBuilder(aggregationName); + protected MinAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new MinAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java index f29615a593f..ec145754a04 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesParser.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; @@ -115,8 +115,8 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse } @Override - protected ValuesSourceAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { + protected ValuesSourceAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { PercentilesMethod method = (PercentilesMethod) otherOptions.getOrDefault(METHOD_FIELD, PercentilesMethod.TDIGEST); double[] cdfValues = (double[]) otherOptions.get(keysField()); @@ -126,10 +126,10 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse return buildFactory(aggregationName, cdfValues, method, compression, numberOfSignificantValueDigits, keyed); } - protected abstract ValuesSourceAggregatorBuilder buildFactory(String aggregationName, double[] cdfValues, - PercentilesMethod method, - Double compression, - Integer numberOfSignificantValueDigits, Boolean keyed); + protected abstract ValuesSourceAggregationBuilder buildFactory(String aggregationName, double[] 
cdfValues, + PercentilesMethod method, + Double compression, + Integer numberOfSignificantValueDigits, Boolean keyed); protected abstract ParseField keysField(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java index ea03451cf67..d36dcdecb7b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -41,7 +41,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; -public class PercentileRanksAggregatorBuilder extends LeafOnly { +public class PercentileRanksAggregationBuilder extends LeafOnly { public static final String NAME = InternalTDigestPercentileRanks.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -51,14 +51,14 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly 5) { throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]"); } @@ -135,7 +135,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly buildFactory(String aggregationName, double[] keys, PercentilesMethod method, - Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) { - PercentileRanksAggregatorBuilder factory = new PercentileRanksAggregatorBuilder(aggregationName); + protected ValuesSourceAggregationBuilder buildFactory(String aggregationName, double[] keys, PercentilesMethod method, + Double compression, Integer numberOfSignificantValueDigits, + Boolean keyed) { + PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(aggregationName); if (keys != null) { factory.values(keys); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java index 45f3d37732f..d2d1e8d9f8a 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -41,7 +41,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; -public class PercentilesAggregatorBuilder extends LeafOnly { +public class PercentilesAggregationBuilder extends LeafOnly { public static final String NAME = InternalTDigestPercentiles.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -51,14 +51,14 @@ public class PercentilesAggregatorBuilder extends LeafOnly 5) { throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]"); } @@ -135,7 +135,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly buildFactory(String aggregationName, double[] keys, PercentilesMethod method, - Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) { - PercentilesAggregatorBuilder factory = new PercentilesAggregatorBuilder(aggregationName); + protected ValuesSourceAggregationBuilder buildFactory(String aggregationName, double[] keys, PercentilesMethod method, + Double compression, Integer numberOfSignificantValueDigits, + Boolean keyed) { + PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(aggregationName); if (keys != null) { factory.percentiles(keys); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java similarity index 93% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 753052b4fe0..d5bdf2f5626 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; 
@@ -40,7 +40,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder { +public class ScriptedMetricAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalScriptedMetric.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -58,14 +58,14 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder params; - public ScriptedMetricAggregatorBuilder(String name) { + public ScriptedMetricAggregationBuilder(String name) { super(name, InternalScriptedMetric.TYPE); } /** * Read from a stream. */ - public ScriptedMetricAggregatorBuilder(StreamInput in) throws IOException { + public ScriptedMetricAggregationBuilder(StreamInput in) throws IOException { super(in, InternalScriptedMetric.TYPE); initScript = in.readOptionalWriteable(Script::new); mapScript = in.readOptionalWriteable(Script::new); @@ -92,7 +92,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilderinit script. */ - public ScriptedMetricAggregatorBuilder initScript(Script initScript) { + public ScriptedMetricAggregationBuilder initScript(Script initScript) { if (initScript == null) { throw new IllegalArgumentException("[initScript] must not be null: [" + name + "]"); } @@ -110,7 +110,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuildermap script. */ - public ScriptedMetricAggregatorBuilder mapScript(Script mapScript) { + public ScriptedMetricAggregationBuilder mapScript(Script mapScript) { if (mapScript == null) { throw new IllegalArgumentException("[mapScript] must not be null: [" + name + "]"); } @@ -128,7 +128,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuildercombine script. */ - public ScriptedMetricAggregatorBuilder combineScript(Script combineScript) { + public ScriptedMetricAggregationBuilder combineScript(Script combineScript) { if (combineScript == null) { throw new IllegalArgumentException("[combineScript] must not be null: [" + name + "]"); } @@ -146,7 +146,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilderreduce script. */ - public ScriptedMetricAggregatorBuilder reduceScript(Script reduceScript) { + public ScriptedMetricAggregationBuilder reduceScript(Script reduceScript) { if (reduceScript == null) { throw new IllegalArgumentException("[reduceScript] must not be null: [" + name + "]"); } @@ -165,7 +165,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilderinit, * map and combine phases. */ - public ScriptedMetricAggregatorBuilder params(Map params) { + public ScriptedMetricAggregationBuilder params(Map params) { if (params == null) { throw new IllegalArgumentException("[params] must not be null: [" + name + "]"); } @@ -214,7 +214,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder { +public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalStats.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public StatsAggregatorBuilder(String name) { + public StatsAggregationBuilder(String name) { super(name, InternalStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. 
*/ - public StatsAggregatorBuilder(StreamInput in) throws IOException { + public StatsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class StatsAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOn public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java index a57ba89b676..eacfc0068b4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java @@ -44,8 +44,8 @@ public class StatsParser extends NumericValuesSourceParser { } @Override - protected StatsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new StatsAggregatorBuilder(aggregationName); + protected StatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new StatsAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java similarity index 88% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java index e6f49d719d6..a2b961f1fc3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java @@ -29,28 +29,28 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Objects; -public class ExtendedStatsAggregatorBuilder - extends ValuesSourceAggregatorBuilder.LeafOnly { +public class ExtendedStatsAggregationBuilder + extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalExtendedStats.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private double sigma = 2.0; - public ExtendedStatsAggregatorBuilder(String name) { + public ExtendedStatsAggregationBuilder(String name) { super(name, InternalExtendedStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. 
*/ - public ExtendedStatsAggregatorBuilder(StreamInput in) throws IOException { + public ExtendedStatsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalExtendedStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); sigma = in.readDouble(); } @@ -60,7 +60,7 @@ public class ExtendedStatsAggregatorBuilder out.writeDouble(sigma); } - public ExtendedStatsAggregatorBuilder sigma(double sigma) { + public ExtendedStatsAggregationBuilder sigma(double sigma) { if (sigma < 0.0) { throw new IllegalArgumentException("[sigma] must be greater than or equal to 0. Found [" + sigma + "] in [" + name + "]"); } @@ -91,7 +91,7 @@ public class ExtendedStatsAggregatorBuilder @Override protected boolean innerEquals(Object obj) { - ExtendedStatsAggregatorBuilder other = (ExtendedStatsAggregatorBuilder) obj; + ExtendedStatsAggregationBuilder other = (ExtendedStatsAggregationBuilder) obj; return Objects.equals(sigma, other.sigma); } @@ -99,4 +99,4 @@ public class ExtendedStatsAggregatorBuilder public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java index 76e6beac2da..c650847360f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java @@ -50,9 +50,9 @@ public class ExtendedStatsParser extends NumericValuesSourceParser { } @Override - protected ExtendedStatsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - ExtendedStatsAggregatorBuilder factory = new ExtendedStatsAggregatorBuilder(aggregationName); + protected ExtendedStatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder(aggregationName); Double sigma = (Double) otherOptions.get(ExtendedStatsAggregator.SIGMA_FIELD); if (sigma != null) { factory.sigma(sigma); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java index 30246452330..25dd1a3f214 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java @@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import 
org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -public class SumAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly { +public class SumAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalSum.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public SumAggregatorBuilder(String name) { + public SumAggregationBuilder(String name) { super(name, InternalSum.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. */ - public SumAggregatorBuilder(StreamInput in) throws IOException { + public SumAggregationBuilder(StreamInput in) throws IOException { super(in, InternalSum.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @@ -80,4 +80,4 @@ public class SumAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java index 7971adba4eb..6edc6cc8905 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java @@ -44,8 +44,8 @@ public class SumParser extends NumericValuesSourceParser { } @Override - protected SumAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions) { - return new SumAggregatorBuilder(aggregationName); + protected SumAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions) { + return new SumAggregationBuilder(aggregationName); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java similarity index 92% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java index 8f15437cc54..7195482f147 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationInitializationException; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -51,7 +51,7 @@ import java.util.List; import java.util.Objects; import java.util.Set; -public class TopHitsAggregatorBuilder extends AggregatorBuilder { +public class TopHitsAggregationBuilder extends AggregationBuilder { public static final String NAME = InternalTopHits.TYPE.name(); public static final ParseField 
AGGREGATION_NAME_FIELD = new ParseField(NAME); @@ -67,14 +67,14 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder scriptFields; private FetchSourceContext fetchSourceContext; - public TopHitsAggregatorBuilder(String name) { + public TopHitsAggregationBuilder(String name) { super(name, InternalTopHits.TYPE); } /** * Read from a stream. */ - public TopHitsAggregatorBuilder(StreamInput in) throws IOException { + public TopHitsAggregationBuilder(StreamInput in) throws IOException { super(in, InternalTopHits.TYPE); explain = in.readBoolean(); fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); @@ -159,7 +159,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder0. */ - public TopHitsAggregatorBuilder from(int from) { + public TopHitsAggregationBuilder from(int from) { if (from < 0) { throw new IllegalArgumentException("[from] must be greater than or equal to 0. Found [" + from + "] in [" + name + "]"); } @@ -177,7 +177,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder10. */ - public TopHitsAggregatorBuilder size(int size) { + public TopHitsAggregationBuilder size(int size) { if (size < 0) { throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]"); } @@ -200,7 +200,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder sort) { + public TopHitsAggregationBuilder sort(SortBuilder sort) { if (sort == null) { throw new IllegalArgumentException("[sort] must not be null: [" + name + "]"); } @@ -248,7 +248,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder> sorts) { + public TopHitsAggregationBuilder sorts(List> sorts) { if (sorts == null) { throw new IllegalArgumentException("[sorts] must not be null: [" + name + "]"); } @@ -271,7 +271,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder fields) { + public TopHitsAggregationBuilder fields(List fields) { if (fields == null) { throw new IllegalArgumentException("[fields] must not be null: [" + name + "]"); } @@ -385,7 +385,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder fieldDataFields) { + public TopHitsAggregationBuilder fieldDataFields(List fieldDataFields) { if (fieldDataFields == null) { throw new IllegalArgumentException("[fieldDataFields] must not be null: [" + name + "]"); } @@ -442,7 +442,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder scriptFields) { + public TopHitsAggregationBuilder scriptFields(List scriptFields) { if (scriptFields == null) { throw new IllegalArgumentException("[scriptFields] must not be null: [" + name + "]"); } @@ -497,7 +497,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilderfalse. */ - public TopHitsAggregatorBuilder trackScores(boolean trackScores) { + public TopHitsAggregationBuilder trackScores(boolean trackScores) { this.trackScores = trackScores; return this; } @@ -544,7 +544,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder { +public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly { public static final String NAME = InternalValueCount.TYPE.name(); public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); - public ValueCountAggregatorBuilder(String name, ValueType targetValueType) { + public ValueCountAggregationBuilder(String name, ValueType targetValueType) { super(name, InternalValueCount.TYPE, ValuesSourceType.ANY, targetValueType); } /** * Read from a stream. 
*/ - public ValueCountAggregatorBuilder(StreamInput in) throws IOException { + public ValueCountAggregationBuilder(StreamInput in) throws IOException { super(in, InternalValueCount.TYPE, ValuesSourceType.ANY); } @@ -84,4 +84,4 @@ public class ValueCountAggregatorBuilder extends ValuesSourceAggregatorBuilder.L public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java index 2f0e7e6f263..fe8a34f242b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; @@ -46,8 +46,8 @@ public class ValueCountParser extends AnyValuesSourceParser { } @Override - protected ValuesSourceAggregatorBuilder createFactory( + protected ValuesSourceAggregationBuilder createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - return new ValueCountAggregatorBuilder(aggregationName, targetValueType); + return new ValueCountAggregationBuilder(aggregationName, targetValueType); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java index 030f5143af3..51d2ea2e8c9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/AbstractValuesSourceParser.java @@ -84,7 +84,7 @@ public abstract class AbstractValuesSourceParser } @Override - public final ValuesSourceAggregatorBuilder parse(String aggregationName, QueryParseContext context) + public final ValuesSourceAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { XContentParser parser = context.parser(); @@ -147,7 +147,7 @@ public abstract class AbstractValuesSourceParser } } - ValuesSourceAggregatorBuilder factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType, + ValuesSourceAggregationBuilder factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType, otherOptions); if (field != null) { factory.field(field); @@ -171,7 +171,7 @@ public abstract class AbstractValuesSourceParser } /** - * Creates a {@link ValuesSourceAggregatorBuilder} from the information + * Creates a {@link ValuesSourceAggregationBuilder} from the information * gathered by the subclass. Options parsed in * {@link AbstractValuesSourceParser} itself will be added to the factory * after it has been returned by this method. 
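The value_count change follows the same pattern; a minimal sketch of the renamed builder, assuming a "user_id" field and using only the constructor and field() setter shown above:

    // Hypothetical usage after the rename; the field name is an assumption.
    ValueCountAggregationBuilder logins = new ValueCountAggregationBuilder("logins", ValueType.STRING);
    logins.field("user_id");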
@@ -189,8 +189,8 @@ public abstract class AbstractValuesSourceParser * method * @return the created factory */ - protected abstract ValuesSourceAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, - ValueType targetValueType, Map otherOptions); + protected abstract ValuesSourceAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType, + ValueType targetValueType, Map otherOptions); /** * Allows subclasses of {@link AbstractValuesSourceParser} to parse extra diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java similarity index 95% rename from core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java rename to core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java index c92faa09613..78d2a2da10f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java @@ -31,7 +31,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationInitializationException; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -46,11 +46,11 @@ import java.util.Objects; /** * */ -public abstract class ValuesSourceAggregatorBuilder> - extends AggregatorBuilder { +public abstract class ValuesSourceAggregationBuilder> + extends AggregationBuilder { - public static abstract class LeafOnly> - extends ValuesSourceAggregatorBuilder { + public static abstract class LeafOnly> + extends ValuesSourceAggregationBuilder { protected LeafOnly(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, type, valuesSourceType, targetValueType); @@ -87,7 +87,7 @@ public abstract class ValuesSourceAggregatorBuilder config; - protected ValuesSourceAggregatorBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { + protected ValuesSourceAggregationBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, type); if (valuesSourceType == null) { throw new IllegalArgumentException("[valuesSourceType] must not be null: [" + name + "]"); @@ -99,7 +99,7 @@ public abstract class ValuesSourceAggregatorBuilder other = (ValuesSourceAggregatorBuilder) obj; + ValuesSourceAggregationBuilder other = (ValuesSourceAggregationBuilder) obj; if (!Objects.equals(field, other.field)) return false; if (!Objects.equals(format, other.format)) diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 429aa36e56f..dc45a99fa87 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -41,7 +41,7 @@ import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.AggregatorBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; @@ -600,7 +600,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Add an aggregation to perform as part of the search. */ - public SearchSourceBuilder aggregation(AggregatorBuilder aggregation) { + public SearchSourceBuilder aggregation(AggregationBuilder aggregation) { if (aggregations == null) { aggregations = AggregatorFactories.builder(); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 6c221e4eb36..e986ab1288f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -80,7 +80,7 @@ import static org.elasticsearch.cluster.service.ClusterServiceUtils.createCluste import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; -public abstract class BaseAggregationTestCase> extends ESTestCase { +public abstract class BaseAggregationTestCase> extends ESTestCase { protected static final String STRING_FIELD_NAME = "mapped_string"; protected static final String INT_FIELD_NAME = "mapped_int"; @@ -238,7 +238,7 @@ public abstract class BaseAggregationTestCase> assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals(testAgg.type.name(), parser.currentName()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - AggregatorBuilder newAgg = aggParsers.parser(testAgg.getType(), ParseFieldMatcher.STRICT).parse(testAgg.name, parseContext); + AggregationBuilder newAgg = aggParsers.parser(testAgg.getType(), ParseFieldMatcher.STRICT).parse(testAgg.name, parseContext); assertSame(XContentParser.Token.END_OBJECT, parser.currentToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); @@ -258,7 +258,7 @@ public abstract class BaseAggregationTestCase> try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(testAgg); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { - AggregatorBuilder deserialized = in.readNamedWriteable(AggregatorBuilder.class); + AggregationBuilder deserialized = in.readNamedWriteable(AggregationBuilder.class); assertEquals(testAgg, deserialized); assertEquals(testAgg.hashCode(), deserialized.hashCode()); assertNotSame(testAgg, deserialized); @@ -299,7 +299,7 @@ public abstract class BaseAggregationTestCase> agg.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { @SuppressWarnings("unchecked") - AB secondAgg = (AB) namedWriteableRegistry.getReader(AggregatorBuilder.class, agg.getWriteableName()).read(in); + AB secondAgg = (AB) namedWriteableRegistry.getReader(AggregationBuilder.class, 
agg.getWriteableName()).read(in); return secondAgg; } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java index 3f8684c36db..c7844f29d05 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java @@ -20,15 +20,15 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder; -public class ChildrenTests extends BaseAggregationTestCase { +public class ChildrenTests extends BaseAggregationTestCase { @Override - protected ChildrenAggregatorBuilder createTestAggregatorBuilder() { + protected ChildrenAggregationBuilder createTestAggregatorBuilder() { String name = randomAsciiOfLengthBetween(3, 20); String childType = randomAsciiOfLengthBetween(5, 40); - ChildrenAggregatorBuilder factory = new ChildrenAggregatorBuilder(name, childType); + ChildrenAggregationBuilder factory = new ChildrenAggregationBuilder(name, childType); return factory; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java index 58641a43b68..74ea18cc1d1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java @@ -20,16 +20,16 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; -public class DateHistogramTests extends BaseAggregationTestCase { +public class DateHistogramTests extends BaseAggregationTestCase { @Override - protected DateHistogramAggregatorBuilder createTestAggregatorBuilder() { - DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder("foo"); + protected DateHistogramAggregationBuilder createTestAggregatorBuilder() { + DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder("foo"); factory.field(INT_FIELD_NAME); if (randomBoolean()) { factory.interval(randomIntBetween(1, 100000)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index b1dc61a9b9e..a95b3cd4871 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsM import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import 
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -117,7 +117,7 @@ public class DateRangeIT extends ESIntegTestCase { public void testDateMath() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "date"); - DateRangeAggregatorBuilder rangeBuilder = dateRange("range"); + DateRangeAggregationBuilder rangeBuilder = dateRange("range"); if (randomBoolean()) { rangeBuilder.field("date"); } else { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index 71b61c0e6e6..eba3790330b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -21,17 +21,17 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; import org.joda.time.DateTimeZone; -public class DateRangeTests extends BaseAggregationTestCase { +public class DateRangeTests extends BaseAggregationTestCase { private final static String[] timeZoneIds = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); @Override - protected DateRangeAggregatorBuilder createTestAggregatorBuilder() { + protected DateRangeAggregationBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); - DateRangeAggregatorBuilder factory = new DateRangeAggregatorBuilder("foo"); + DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder("foo"); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 6635a674b6a..96fbf17480c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -22,12 +22,12 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import 
org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.test.ESIntegTestCase; @@ -128,7 +128,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase { public void testSimpleDiversity() throws Exception { int MAX_DOCS_PER_AUTHOR = 1; - DiversifiedAggregatorBuilder sampleAgg = new DiversifiedAggregatorBuilder("sample").shardSize(100); + DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("test") @@ -151,9 +151,9 @@ public class DiversifiedSamplerIT extends ESIntegTestCase { public void testNestedDiversity() throws Exception { // Test multiple samples gathered under buckets made by a parent agg int MAX_DOCS_PER_AUTHOR = 1; - TermsAggregatorBuilder rootTerms = terms("genres").field("genre"); + TermsAggregationBuilder rootTerms = terms("genres").field("genre"); - DiversifiedAggregatorBuilder sampleAgg = new DiversifiedAggregatorBuilder("sample").shardSize(100); + DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); @@ -178,11 +178,11 @@ public class DiversifiedSamplerIT extends ESIntegTestCase { // Test samples nested under samples int MAX_DOCS_PER_AUTHOR = 1; int MAX_DOCS_PER_GENRE = 2; - DiversifiedAggregatorBuilder rootSample = new DiversifiedAggregatorBuilder("genreSample").shardSize(100) + DiversifiedAggregationBuilder rootSample = new DiversifiedAggregationBuilder("genreSample").shardSize(100) .field("genre") .maxDocsPerValue(MAX_DOCS_PER_GENRE); - DiversifiedAggregatorBuilder sampleAgg = new DiversifiedAggregatorBuilder("sample").shardSize(100); + DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); sampleAgg.subAggregation(terms("genres").field("genre")); @@ -210,7 +210,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase { public void testPartiallyUnmappedDiversifyField() throws Exception { // One of the indexes is missing the "author" field used for // diversifying results - DiversifiedAggregatorBuilder sampleAgg = new DiversifiedAggregatorBuilder("sample").shardSize(100).field("author") + DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100).field("author") .maxDocsPerValue(1); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped_author", "test").setSearchType(SearchType.QUERY_AND_FETCH) @@ -226,7 +226,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase { public void testWhollyUnmappedDiversifyField() throws Exception { //All of the indices are missing the "author" field used for diversifying results int MAX_DOCS_PER_AUTHOR = 1; - DiversifiedAggregatorBuilder sampleAgg = new DiversifiedAggregatorBuilder("sample").shardSize(100); + DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); 
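Putting the sampler and terms renames together, a hedged sketch of how the diversified-sampler pattern from the test above reads in client code; the "articles" index, the "author" field, and the client variable are assumptions:

    // Hypothetical composition after the rename, mirroring the test's shape.
    DiversifiedAggregationBuilder sample = new DiversifiedAggregationBuilder("sample").shardSize(100);
    sample.field("author").maxDocsPerValue(1);
    sample.subAggregation(new TermsAggregationBuilder("authors", null).field("author"));
    SearchResponse response = client.prepareSearch("articles").addAggregation(sample).get();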
sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped", "idx_unmapped_author").setSearchType(SearchType.QUERY_AND_FETCH) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerTests.java index 512d7a8d69b..b68caad0ea7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerTests.java @@ -21,14 +21,14 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator.ExecutionMode; -public class DiversifiedSamplerTests extends BaseAggregationTestCase { +public class DiversifiedSamplerTests extends BaseAggregationTestCase { @Override - protected final DiversifiedAggregatorBuilder createTestAggregatorBuilder() { - DiversifiedAggregatorBuilder factory = new DiversifiedAggregatorBuilder("foo"); + protected final DiversifiedAggregationBuilder createTestAggregatorBuilder() { + DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder("foo"); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java index 89dd3e3b137..65b80537c7a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java @@ -23,17 +23,17 @@ import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range; import org.elasticsearch.test.geo.RandomShapeGenerator; -public class GeoDistanceRangeTests extends BaseAggregationTestCase { +public class GeoDistanceRangeTests extends BaseAggregationTestCase { @Override - protected GeoDistanceAggregatorBuilder createTestAggregatorBuilder() { + protected GeoDistanceAggregationBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); GeoPoint origin = RandomShapeGenerator.randomPoint(random()); - GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder("foo", origin); + GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder("foo", origin); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java index 34e3e266d6a..c3c8f6902b3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java @@ -20,14 +20,14 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; -public class GeoHashGridTests extends BaseAggregationTestCase { +public class GeoHashGridTests extends BaseAggregationTestCase { @Override - protected GeoGridAggregatorBuilder createTestAggregatorBuilder() { + protected GeoGridAggregationBuilder createTestAggregatorBuilder() { String name = randomAsciiOfLengthBetween(3, 20); - GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(name); + GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(name); if (randomBoolean()) { int precision = randomIntBetween(1, 12); factory.precision(precision); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java index ca2b5c9d6c0..a874eff839b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalTests.java @@ -20,13 +20,13 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; -public class GlobalTests extends BaseAggregationTestCase { +public class GlobalTests extends BaseAggregationTestCase { @Override - protected GlobalAggregatorBuilder createTestAggregatorBuilder() { - return new GlobalAggregatorBuilder(randomAsciiOfLengthBetween(3, 20)); + protected GlobalAggregationBuilder createTestAggregatorBuilder() { + return new GlobalAggregationBuilder(randomAsciiOfLengthBetween(3, 20)); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java index 1cd930ecc31..ac0d6d0df8b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java @@ -22,13 +22,13 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; -public class HistogramTests extends BaseAggregationTestCase { +public class HistogramTests extends BaseAggregationTestCase { @Override - protected HistogramAggregatorBuilder createTestAggregatorBuilder() { - HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder("foo"); + protected HistogramAggregationBuilder 
createTestAggregatorBuilder() { + HistogramAggregationBuilder factory = new HistogramAggregationBuilder("foo"); factory.field(INT_FIELD_NAME); factory.interval(randomIntBetween(1, 100000)); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java index 468e8503b0a..3a2abda6aa7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java @@ -37,7 +37,6 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.AggregatorBuilder; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.test.ESIntegTestCase; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java index db31f576e0c..5d86571f08f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java @@ -24,9 +24,9 @@ import java.net.UnknownHostException; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder; -public class IpRangeTests extends BaseAggregationTestCase { +public class IpRangeTests extends BaseAggregationTestCase { private static String randomIp(boolean v4) { try { @@ -45,9 +45,9 @@ public class IpRangeTests extends BaseAggregationTestCase> builder(); + public abstract ValuesSourceAggregationBuilder.LeafOnly> builder(); public String sortKey() { return name; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java index b9c60dab1be..22b4eae8421 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeTests.java @@ -21,14 +21,14 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; -public class RangeTests extends BaseAggregationTestCase { +public class RangeTests extends BaseAggregationTestCase { @Override - protected RangeAggregatorBuilder createTestAggregatorBuilder() { + protected RangeAggregationBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); - RangeAggregatorBuilder factory = new RangeAggregatorBuilder("foo"); + RangeAggregationBuilder factory = new RangeAggregationBuilder("foo"); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java index f42d213bb57..980d792013a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.metrics.max.Max; @@ -123,7 +123,7 @@ public class SamplerIT extends ESIntegTestCase { } public void testSimpleSampler() throws Exception { - SamplerAggregatorBuilder sampleAgg = sampler("sample").shardSize(100); + SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).execute().actionGet(); @@ -140,7 +140,7 @@ public class SamplerIT extends ESIntegTestCase { } public void testUnmappedChildAggNoDiversity() throws Exception { - SamplerAggregatorBuilder sampleAgg = sampler("sample").shardSize(100); + SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped") .setSearchType(SearchType.QUERY_AND_FETCH) @@ -157,7 +157,7 @@ public class SamplerIT extends ESIntegTestCase { } public void testPartiallyUnmappedChildAggNoDiversity() throws Exception { - SamplerAggregatorBuilder sampleAgg = sampler("sample").shardSize(100); + SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped", "test") .setSearchType(SearchType.QUERY_AND_FETCH) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java index 8d792fd72ff..e4de490f6b2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerTests.java @@ -20,13 +20,13 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; -public class SamplerTests extends BaseAggregationTestCase { +public class SamplerTests extends BaseAggregationTestCase { @Override - protected final SamplerAggregatorBuilder createTestAggregatorBuilder() { - SamplerAggregatorBuilder factory = new SamplerAggregatorBuilder("foo"); + protected final SamplerAggregationBuilder createTestAggregatorBuilder() { + SamplerAggregationBuilder factory = new SamplerAggregationBuilder("foo"); if 
(randomBoolean()) { factory.shardSize(randomIntBetween(1, 1000)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java index 373eb0e6e96..897125ee2fa 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java @@ -24,7 +24,7 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; @@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude import java.util.SortedSet; import java.util.TreeSet; -public class SignificantTermsTests extends BaseAggregationTestCase { +public class SignificantTermsTests extends BaseAggregationTestCase { private static final String[] executionHints; @@ -50,9 +50,9 @@ public class SignificantTermsTests extends BaseAggregationTestCase { +public class TermsTests extends BaseAggregationTestCase { private static final String[] executionHints; @@ -46,9 +46,9 @@ public class TermsTests extends BaseAggregationTestCase } @Override - protected TermsAggregatorBuilder createTestAggregatorBuilder() { + protected TermsAggregationBuilder createTestAggregatorBuilder() { String name = randomAsciiOfLengthBetween(3, 20); - TermsAggregatorBuilder factory = new TermsAggregatorBuilder(name, null); + TermsAggregationBuilder factory = new TermsAggregationBuilder(name, null); String field = randomAsciiOfLengthBetween(3, 20); int randomFieldBranch = randomInt(2); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index dbe10e2ff9d..272aa70d48b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -123,7 +123,7 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase { AggregationContext context = new AggregationContext(searchContext); AggregatorFactories.Builder builder = AggregatorFactories.builder(); - NestedAggregatorBuilder factory = new NestedAggregatorBuilder("test", "nested_field"); + NestedAggregationBuilder factory = new NestedAggregationBuilder("test", "nested_field"); builder.addAggregator(factory); AggregatorFactories factories = builder.build(context, null); searchContext.aggregations(new SearchContextAggregations(factories)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java index 6ea5b3791d8..29dde100a08 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java @@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.nested; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -public class NestedTests extends BaseAggregationTestCase { +public class NestedTests extends BaseAggregationTestCase { @Override - protected NestedAggregatorBuilder createTestAggregatorBuilder() { - return new NestedAggregatorBuilder(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(3, 40)); + protected NestedAggregationBuilder createTestAggregatorBuilder() { + return new NestedAggregationBuilder(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(3, 40)); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java index 1a45c550bc1..97dbf3718af 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedTests.java @@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.nested; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -public class ReverseNestedTests extends BaseAggregationTestCase { +public class ReverseNestedTests extends BaseAggregationTestCase { @Override - protected ReverseNestedAggregatorBuilder createTestAggregatorBuilder() { - ReverseNestedAggregatorBuilder factory = new ReverseNestedAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected ReverseNestedAggregationBuilder createTestAggregatorBuilder() { + ReverseNestedAggregationBuilder factory = new ReverseNestedAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.path(randomAsciiOfLengthBetween(3, 40)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index df449aeeaf2..8dc015b30ed 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -257,7 +257,7 @@ public class SignificanceHeuristicTests extends ESTestCase { protected SignificanceHeuristic parseFromBuilder(ParseFieldRegistry significanceHeuristicParserRegistry, SearchContext searchContext, SignificanceHeuristic significanceHeuristic) throws IOException { - SignificantTermsAggregatorBuilder stBuilder = significantTerms("testagg"); + SignificantTermsAggregationBuilder stBuilder = significantTerms("testagg"); stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200); XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder(); stBuilder.internalXContent(stXContentBuilder, null); @@ -271,7 +271,7 @@ public class SignificanceHeuristicTests extends ESTestCase { IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT); stParser.nextToken(); - SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser( + 
SignificantTermsAggregationBuilder aggregatorFactory = (SignificantTermsAggregationBuilder) new SignificantTermsParser( significanceHeuristicParserRegistry, registry).parse("testagg", parseContext); stParser.nextToken(); assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java index 58d7fa70d62..f1ccf344a7c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractNumericMetricTestCase.java @@ -22,9 +22,9 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; -public abstract class AbstractNumericMetricTestCase> +public abstract class AbstractNumericMetricTestCase> extends BaseAggregationTestCase { @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java index 61e685169f6..df90dc4f7c3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -public class AvgTests extends AbstractNumericMetricTestCase { +public class AvgTests extends AbstractNumericMetricTestCase { @Override - protected AvgAggregatorBuilder doCreateTestAggregatorFactory() { - return new AvgAggregatorBuilder("foo"); + protected AvgAggregationBuilder doCreateTestAggregatorFactory() { + return new AvgAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java index 4a7ca7e8b38..3f78cc17aa9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; -public class ExtendedStatsTests extends AbstractNumericMetricTestCase { +public class ExtendedStatsTests extends AbstractNumericMetricTestCase { @Override - protected ExtendedStatsAggregatorBuilder doCreateTestAggregatorFactory() { - ExtendedStatsAggregatorBuilder factory = new ExtendedStatsAggregatorBuilder("foo"); + protected ExtendedStatsAggregationBuilder doCreateTestAggregatorFactory() { + ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder("foo"); if (randomBoolean()) { 
factory.sigma(randomDoubleBetween(0.0, 10.0, true)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java index 8a6a4373691..1b563d531a8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; -public class FilterTests extends BaseAggregationTestCase { +public class FilterTests extends BaseAggregationTestCase { @Override - protected FilterAggregatorBuilder createTestAggregatorBuilder() { - FilterAggregatorBuilder factory = new FilterAggregatorBuilder(randomAsciiOfLengthBetween(1, 20), + protected FilterAggregationBuilder createTestAggregatorBuilder() { + FilterAggregationBuilder factory = new FilterAggregationBuilder(randomAsciiOfLengthBetween(1, 20), QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); // NORELEASE make RandomQueryBuilder work outside of the // AbstractQueryTestCase diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java index cd2dae53327..89fc38b7cd8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java @@ -24,15 +24,15 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; -public class FiltersTests extends BaseAggregationTestCase { +public class FiltersTests extends BaseAggregationTestCase { @Override - protected FiltersAggregatorBuilder createTestAggregatorBuilder() { + protected FiltersAggregationBuilder createTestAggregatorBuilder() { int size = randomIntBetween(1, 20); - FiltersAggregatorBuilder factory; + FiltersAggregationBuilder factory; if (randomBoolean()) { KeyedFilter[] filters = new KeyedFilter[size]; int i = 0; @@ -40,13 +40,13 @@ public class FiltersTests extends BaseAggregationTestCase { +public class GeoBoundsTests extends BaseAggregationTestCase { @Override - protected GeoBoundsAggregatorBuilder createTestAggregatorBuilder() { - GeoBoundsAggregatorBuilder factory = new GeoBoundsAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected GeoBoundsAggregationBuilder createTestAggregatorBuilder() { + GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); String field = randomAsciiOfLengthBetween(3, 20); factory.field(field); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java index c912c18a82e..1ea21a1ff1d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; -public class GeoCentroidTests extends BaseAggregationTestCase { +public class GeoCentroidTests extends BaseAggregationTestCase { @Override - protected GeoCentroidAggregatorBuilder createTestAggregatorBuilder() { - GeoCentroidAggregatorBuilder factory = new GeoCentroidAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected GeoCentroidAggregationBuilder createTestAggregatorBuilder() { + GeoCentroidAggregationBuilder factory = new GeoCentroidAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java index a9fe4654c9d..6ffd824aa3c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -public class MaxTests extends AbstractNumericMetricTestCase { +public class MaxTests extends AbstractNumericMetricTestCase { @Override - protected MaxAggregatorBuilder doCreateTestAggregatorFactory() { - return new MaxAggregatorBuilder("foo"); + protected MaxAggregationBuilder doCreateTestAggregatorFactory() { + return new MaxAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java index 54512f579f3..eed4059ade7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -public class MinTests extends AbstractNumericMetricTestCase { +public class MinTests extends AbstractNumericMetricTestCase { @Override - protected MinAggregatorBuilder doCreateTestAggregatorFactory() { - return new MinAggregatorBuilder("foo"); + protected MinAggregationBuilder doCreateTestAggregatorFactory() { + return new MinAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java index 3f49da5eb6e..979747ade2e 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MissingTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; -public class MissingTests extends BaseAggregationTestCase { +public class MissingTests extends BaseAggregationTestCase { @Override - protected final MissingAggregatorBuilder createTestAggregatorBuilder() { - MissingAggregatorBuilder factory = new MissingAggregatorBuilder("foo", null); + protected final MissingAggregationBuilder createTestAggregatorBuilder() { + MissingAggregationBuilder factory = new MissingAggregationBuilder("foo", null); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java index 4636e4ed174..1907733fbd8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; -public class PercentileRanksTests extends BaseAggregationTestCase { +public class PercentileRanksTests extends BaseAggregationTestCase { @Override - protected PercentileRanksAggregatorBuilder createTestAggregatorBuilder() { - PercentileRanksAggregatorBuilder factory = new PercentileRanksAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected PercentileRanksAggregationBuilder createTestAggregatorBuilder() { + PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.keyed(randomBoolean()); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java index 674197cffbf..b5539f8c1be 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; -public class PercentilesTests extends BaseAggregationTestCase { +public class PercentilesTests extends BaseAggregationTestCase { @Override - protected PercentilesAggregatorBuilder createTestAggregatorBuilder() { - PercentilesAggregatorBuilder factory = new 
PercentilesAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected PercentilesAggregationBuilder createTestAggregatorBuilder() { + PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.keyed(randomBoolean()); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java index a4e12b56d8e..e4f96fae762 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java @@ -22,16 +22,16 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import java.util.HashMap; import java.util.Map; -public class ScriptedMetricTests extends BaseAggregationTestCase { +public class ScriptedMetricTests extends BaseAggregationTestCase { @Override - protected ScriptedMetricAggregatorBuilder createTestAggregatorBuilder() { - ScriptedMetricAggregatorBuilder factory = new ScriptedMetricAggregatorBuilder(randomAsciiOfLengthBetween(1, 20)); + protected ScriptedMetricAggregationBuilder createTestAggregatorBuilder() { + ScriptedMetricAggregationBuilder factory = new ScriptedMetricAggregationBuilder(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.initScript(randomScript("initScript")); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java index 5db4e1e332b..76a8e9aa98a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -public class StatsTests extends AbstractNumericMetricTestCase { +public class StatsTests extends AbstractNumericMetricTestCase { @Override - protected StatsAggregatorBuilder doCreateTestAggregatorFactory() { - return new StatsAggregatorBuilder("foo"); + protected StatsAggregationBuilder doCreateTestAggregatorFactory() { + return new StatsAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java index a6d9f0bd270..edc6d4edef0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -public class SumTests extends AbstractNumericMetricTestCase { +public class SumTests extends 
AbstractNumericMetricTestCase { @Override - protected SumAggregatorBuilder doCreateTestAggregatorFactory() { - return new SumAggregatorBuilder("foo"); + protected SumAggregationBuilder doCreateTestAggregatorFactory() { + return new SumAggregationBuilder("foo"); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index 00bb04dde9f..c79ab04e492 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilderTests; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; @@ -38,11 +38,11 @@ import java.util.List; import static org.hamcrest.Matchers.containsString; -public class TopHitsTests extends BaseAggregationTestCase { +public class TopHitsTests extends BaseAggregationTestCase { @Override - protected final TopHitsAggregatorBuilder createTestAggregatorBuilder() { - TopHitsAggregatorBuilder factory = new TopHitsAggregatorBuilder("foo"); + protected final TopHitsAggregationBuilder createTestAggregatorBuilder() { + TopHitsAggregationBuilder factory = new TopHitsAggregationBuilder("foo"); if (randomBoolean()) { factory.from(randomIntBetween(0, 10000)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java index c9b601c4e8b..99d4d41839c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; -public class ValueCountTests extends BaseAggregationTestCase { +public class ValueCountTests extends BaseAggregationTestCase { @Override - protected final ValueCountAggregatorBuilder createTestAggregatorBuilder() { - ValueCountAggregatorBuilder factory = new ValueCountAggregatorBuilder("foo", null); + protected final ValueCountAggregationBuilder createTestAggregatorBuilder() { + ValueCountAggregationBuilder factory = new ValueCountAggregationBuilder("foo", null); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java index a769a71b8e5..ab0377c6331 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java @@ -22,11 +22,11 @@ package org.elasticsearch.search.aggregations.metrics.cardinality; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -public class CardinalityTests extends BaseAggregationTestCase { +public class CardinalityTests extends BaseAggregationTestCase { @Override - protected final CardinalityAggregatorBuilder createTestAggregatorBuilder() { - CardinalityAggregatorBuilder factory = new CardinalityAggregatorBuilder("foo", null); + protected final CardinalityAggregationBuilder createTestAggregatorBuilder() { + CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder("foo", null); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index 43b3b4d357c..ce9394692de 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -20,11 +20,11 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -109,27 +109,27 @@ public class PipelineAggregationHelperTests extends ESTestCase { * @param values Array of values to compute metric for * @param metric A metric builder which defines what kind of metric should be returned for the values */ - public static double calculateMetric(double[] values, ValuesSourceAggregatorBuilder metric) { + public static double calculateMetric(double[] values, ValuesSourceAggregationBuilder metric) { - if (metric instanceof MinAggregatorBuilder) { + if (metric instanceof MinAggregationBuilder) { double accumulator = Double.POSITIVE_INFINITY; for (double value : values) { accumulator = Math.min(accumulator, value); } return accumulator; - } else if (metric instanceof MaxAggregatorBuilder) { + } else if (metric instanceof MaxAggregationBuilder) { double accumulator = Double.NEGATIVE_INFINITY; for (double value : values) { accumulator = Math.max(accumulator, value); } return accumulator; - } else if (metric instanceof SumAggregatorBuilder) { + } else if (metric instanceof SumAggregationBuilder) { double accumulator = 0; for (double value : values) { accumulator += value; } return 
accumulator; - } else if (metric instanceof AvgAggregatorBuilder) { + } else if (metric instanceof AvgAggregationBuilder) { double accumulator = 0; for (double value : values) { accumulator += value; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 94ac6fc10ab..c16d8e8062e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -79,7 +79,7 @@ public class MovAvgIT extends ESIntegTestCase { static int period; static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceAggregatorBuilder> metric; + static ValuesSourceAggregationBuilder> metric; static List mockHisto; static Map> testValues; @@ -1289,8 +1289,8 @@ public class MovAvgIT extends ESIntegTestCase { } } - private ValuesSourceAggregatorBuilder> randomMetric(String name, - String field) { + private ValuesSourceAggregationBuilder> randomMetric(String name, + String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java index 66961c2fcbc..1b263d1af09 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -61,7 +61,7 @@ public class SerialDiffIT extends ESIntegTestCase { static int numBuckets; static int lag; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceAggregatorBuilder> metric; + static ValuesSourceAggregationBuilder> metric; static List mockHisto; static Map> testValues; @@ -81,7 +81,7 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private ValuesSourceAggregatorBuilder> randomMetric(String name, String field) { + private ValuesSourceAggregationBuilder> randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 6f83746d4ce..deed43abf12 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -122,7 +122,7 @@ public class EquivalenceTests extends ESIntegTestCase { } } - RangeAggregatorBuilder query = range("range").field("values"); + RangeAggregationBuilder query = range("range").field("values"); for (int i = 0; i < ranges.length; ++i) { String key = Integer.toString(i); if (ranges[i][0] == Double.NEGATIVE_INFINITY) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index 1c17c1966e5..662d4d2f30c 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -113,17 +113,17 @@ public class MinDocCountTests extends AbstractTermsTestCase { private enum Script { NO { @Override - TermsAggregatorBuilder apply(TermsAggregatorBuilder builder, String field) { + TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { return builder.field(field); } }, YES { @Override - TermsAggregatorBuilder apply(TermsAggregatorBuilder builder, String field) { + TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field) { return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values")); } }; - abstract TermsAggregatorBuilder apply(TermsAggregatorBuilder builder, String field); + abstract TermsAggregationBuilder apply(TermsAggregationBuilder builder, String field); } // check that terms2 is a subset of terms1 diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java index 2e59b798297..7e76b3f03eb 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java +++ 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import java.util.Arrays; @@ -88,7 +88,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { return percents; } - private static PercentileRanksAggregatorBuilder randomCompression(PercentileRanksAggregatorBuilder builder) { + private static PercentileRanksAggregationBuilder randomCompression(PercentileRanksAggregationBuilder builder) { if (randomBoolean()) { builder.compression(randomIntBetween(20, 120) + randomDouble()); } @@ -462,4 +462,4 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { } } -} \ No newline at end of file +} diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java index 69d3c281ca8..712c9ebd951 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import java.util.Arrays; @@ -87,7 +87,7 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { return percentiles; } - private static PercentilesAggregatorBuilder randomCompression(PercentilesAggregatorBuilder builder) { + private static PercentilesAggregationBuilder randomCompression(PercentilesAggregationBuilder builder) { if (randomBoolean()) { builder.compression(randomIntBetween(20, 120) + randomDouble()); } @@ -446,4 +446,4 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { } } -} \ No newline at end of file +} From 3cf4214255e872612d9f99e1900ccadd55437e87 Mon Sep 17 00:00:00 2001 From: Jeff Evans Date: Thu, 19 May 2016 08:47:07 -0500 Subject: [PATCH 27/36] Add better error message when analyzer created without tokenizer or analyzer type (#18455) Closes #15492 --- .../index/analysis/AnalysisRegistry.java | 4 ++-- .../index/analysis/AnalysisServiceTests.java | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index c33eef4ac61..2d73df76f07 100644 --- 
a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -321,7 +321,7 @@ public final class AnalysisRegistry implements Closeable { if (currentSettings.get("tokenizer") != null) { factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings); } else { - throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it"); + throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer"); } } else if (typeName.equals("custom")) { factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings); @@ -335,7 +335,7 @@ public final class AnalysisRegistry implements Closeable { factories.put(name, factory); } else { if (typeName == null) { - throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it"); + throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer"); } AnalysisModule.AnalysisProvider type = providerMap.get(typeName); if (type == null) { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index b667c256019..b72996bd1a1 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -41,6 +41,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class AnalysisServiceTests extends ESTestCase { @@ -183,4 +184,19 @@ public class AnalysisServiceTests extends ESTestCase { assertSame(analysisService.analyzer(preBuiltAnalyzers.name()), otherAnalysisSergice.analyzer(preBuiltAnalyzers.name())); } } + + public void testNoTypeOrTokenizerErrorMessage() throws IOException { + Version version = VersionUtils.randomVersion(random()); + Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"}) + .putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"}) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new AnalysisRegistry(null, new Environment(settings)).build(idxSettings)); + assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); + } } From a01ecb20ea3edaee2db670763a3bb58d79ab5f03 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 13 May 2016 15:27:02 +0200 Subject: [PATCH 28/36] Port Delete By Query to Reindex infrastructure closes #16883 --- docs/reference/docs.asciidoc | 2 + docs/reference/docs/delete-by-query.asciidoc | 318 ++++++++++ docs/reference/docs/update-by-query.asciidoc | 4 +- .../AbstractAsyncBulkByScrollAction.java | 16 +- .../AbstractAsyncBulkIndexByScrollAction.java | 542 +++++++++++++----- .../AbstractBaseReindexRestHandler.java | 111 ++-- .../AbstractBulkByQueryRestHandler.java | 118 ++++ .../index/reindex/DeleteByQueryAction.java | 43 ++ .../index/reindex/DeleteByQueryRequest.java | 79 
+++ .../reindex/DeleteByQueryRequestBuilder.java | 51 ++ .../index/reindex/ReindexPlugin.java | 2 + .../reindex/RestDeleteByQueryAction.java | 77 +++ .../index/reindex/RestReindexAction.java | 46 +- .../index/reindex/RestRethrottleAction.java | 1 + .../reindex/RestUpdateByQueryAction.java | 75 +-- .../reindex/TransportDeleteByQueryAction.java | 109 ++++ .../index/reindex/TransportReindexAction.java | 188 +++--- .../reindex/TransportUpdateByQueryAction.java | 99 ++-- ...BulkIndexByScrollActionScriptTestCase.java | 30 +- ...tAsyncBulkIndexByScrollActionTestCase.java | 2 - ...lkIndexbyScrollActionMetadataTestCase.java | 6 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../BulkIndexByScrollResponseMatcher.java | 13 + .../reindex/DeleteByQueryBasicTests.java | 211 +++++++ .../reindex/DeleteByQueryCancelTests.java | 184 ++++++ .../reindex/DeleteByQueryConcurrentTests.java | 119 ++++ .../index/reindex/ReindexMetadataTests.java | 12 +- .../index/reindex/ReindexScriptTests.java | 11 +- .../index/reindex/ReindexTestCase.java | 4 + .../reindex/UpdateByQueryMetadataTests.java | 5 +- .../reindex/UpdateByQueryWithScriptTests.java | 8 +- .../test/delete_by_query/10_basic.yaml | 304 ++++++++++ .../test/delete_by_query/20_validation.yaml | 99 ++++ .../test/delete_by_query/30_by_type.yaml | 72 +++ .../test/delete_by_query/50_consistency.yaml | 62 ++ .../test/delete_by_query/70_throttle.yaml | 202 +++++++ .../rest-api-spec/test/reindex/10_basic.yaml | 3 + .../test/reindex/20_validation.yaml | 2 +- .../test/update_by_query/10_basic.yaml | 2 + .../test/update_by_query/10_script.yaml | 4 +- .../rest-api-spec/api/delete_by_query.json | 207 +++++++ .../rest-api-spec/api/reindex.rethrottle.json | 2 +- 42 files changed, 3007 insertions(+), 440 deletions(-) create mode 100644 docs/reference/docs/delete-by-query.asciidoc create mode 100644 modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java create mode 100644 modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java create mode 100644 modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java create mode 100644 modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java create mode 100644 modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java create mode 100644 modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java create mode 100644 modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java create mode 100644 modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java create mode 100644 modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java create mode 100644 modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml create mode 100644 modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml create mode 100644 modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml create mode 100644 modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml create mode 100644 modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json diff --git a/docs/reference/docs.asciidoc b/docs/reference/docs.asciidoc index 
465d2e60c77..f3b30e7f0c3 100644 --- a/docs/reference/docs.asciidoc +++ b/docs/reference/docs.asciidoc @@ -27,6 +27,8 @@ include::docs/get.asciidoc[] include::docs/delete.asciidoc[] +include::docs/delete-by-query.asciidoc[] + include::docs/update.asciidoc[] include::docs/update-by-query.asciidoc[] diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc new file mode 100644 index 00000000000..1562d8c515d --- /dev/null +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -0,0 +1,318 @@ +[[docs-delete-by-query]] +== Delete By Query API + +experimental[The delete-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible] + +The simplest usage of `_delete_by_query` just performs a deletion on every +document that matches a query. Here is the API: + +[source,js] +-------------------------------------------------- +POST twitter/_delete_by_query +{ + "query": { <1> + "match": { + "message": "some message" + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:big_twitter] + +<1> The query must be passed as a value to the `query` key, in the same +way as the <>. You can also use the `q` +parameter in the same way as the search API. + +That will return something like this: + +[source,js] +-------------------------------------------------- +{ + "took" : 147, + "timed_out": false, + "deleted": 119, + "batches": 1, + "version_conflicts": 0, + "noops": 0, + "retries": { + "bulk": 0, + "search": 0 + }, + "throttled_millis": 0, + "requests_per_second": "unlimited", + "throttled_until_millis": 0, + "total": 119, + "failures" : [ ] +} +-------------------------------------------------- +// TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/] + +`_delete_by_query` gets a snapshot of the index when it starts and deletes what +it finds using `internal` versioning. That means that you'll get a version +conflict if the document changes between the time when the snapshot was taken +and when the delete request is processed. When the versions match the document +is deleted. + +During the `_delete_by_query` execution, multiple search requests are sequentially +executed in order to find all the matching documents to delete. Every time a batch +of documents is found, a corresponding bulk request is executed to delete all +these documents. In case a search or bulk request got rejected, `_delete_by_query` + relies on a default policy to retry rejected requests (up to 10 times, with + exponential back off). Reaching the maximum retries limit causes the `_delete_by_query` + to abort and all failures are returned in the `failures` of the response. + The deletions that have been performed still stick. In other words, the process + is not rolled back, only aborted. While the first failure causes the abort, all + failures that are returned by the failing bulk request are returned in the `failures` + element so it's possible for there to be quite a few. + +If you'd like to count version conflicts rather than cause them to abort then +set `conflicts=proceed` on the URL or `"conflicts": "proceed"` in the request body. + +Back to the API format, you can limit `_delete_by_query` to a single type.
This +will only delete `tweet` documents from the `twitter` index: + +[source,js] +-------------------------------------------------- +POST twitter/tweet/_delete_by_query?conflicts=proceed +{ + "query": { + "match_all": {} + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] + +It's also possible to delete documents of multiple indexes and multiple +types at once, just like the search API: + +[source,js] +-------------------------------------------------- +POST twitter,blog/tweet,post/_delete_by_query +{ + "query": { + "match_all": {} + } +} +-------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT twitter\nPUT blog\nGET _cluster\/health?wait_for_status=yellow\n/] + +If you provide `routing` then the routing is copied to the scroll query, +limiting the process to the shards that match that routing value: + +[source,js] +-------------------------------------------------- +POST twitter/_delete_by_query?routing=1 +{ + "query": { + "range" : { + "age" : { + "gte" : 10 + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] + +By default `_delete_by_query` uses scroll batches of 1000. You can change the +batch size with the `scroll_size` URL parameter: + +[source,js] +-------------------------------------------------- +POST twitter/_delete_by_query?scroll_size=5000 +{ + "query": { + "term": { + "user": "kimchy" + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] + + +[float] +=== URL Parameters + +In addition to the standard parameters like `pretty`, the Delete By Query API +also supports `refresh`, `wait_for_completion`, `consistency`, and `timeout`. + +Sending the `refresh` will refresh all shards involved in the delete by query +once the request completes. This is different than the Delete API's `refresh` +parameter which causes just the shard that received the delete request +to be refreshed. + +If the request contains `wait_for_completion=false` then Elasticsearch will +perform some preflight checks, launch the request, and then return a `task` +which can be used with <> to cancel +or get the status of the task. For now, once the request is finished the task +is gone and the only place to look for the ultimate result of the task is in +the Elasticsearch log file. This will be fixed soon. + +`consistency` controls how many copies of a shard must respond to each write +request. `timeout` controls how long each write request waits for unavailable +shards to become available. Both work exactly how they work in the +<>. + +`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc) +and throttles the number of requests per second that the delete by query issues. +The throttling is done waiting between bulk batches so that it can manipulate +the scroll timeout. The wait time is the difference between the time it took the +batch to complete and the time `requests_per_second * requests_in_the_batch`. +Since the batch isn't broken into multiple bulk requests large batch sizes will +cause Elasticsearch to create many requests and then wait for a while before +starting the next set. This is "bursty" instead of "smooth". The default is +`unlimited` which is also the only non-number value that it accepts. 
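(Editor's note, not part of the patch: the batch-level throttling described above is easier to follow with concrete numbers. The sketch below assumes the intent is that a batch of N requests should take at least N / `requests_per_second` seconds, so the action sleeps for whatever time is left after the bulk call returns; the class and method names are illustrative only, not the module's actual implementation.)

[source,java]
--------------------------------------------------
// Hypothetical sketch of the per-batch wait computation described above.
public final class ThrottleWaitSketch {
    // Wait time before the next scroll batch, in milliseconds.
    static long waitMillis(int requestsInBatch, float requestsPerSecond, long batchTookMillis) {
        if (requestsPerSecond <= 0) { // treat "unlimited" as no throttling
            return 0L;
        }
        double targetMillis = requestsInBatch * 1000.0 / requestsPerSecond;
        return (long) Math.max(0.0, targetMillis - batchTookMillis);
    }

    public static void main(String[] args) {
        // A 1000-document batch at requests_per_second=500 should take about 2000ms in total;
        // if the bulk request itself took 500ms, the action would sleep roughly 1500ms more.
        System.out.println(waitMillis(1000, 500f, 500L)); // prints 1500
    }
}
--------------------------------------------------

With large batch sizes this single sleep can be long, which is the "bursty" behaviour mentioned above.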
+ +[float] +=== Response body + +The JSON response looks like this: + +[source,js] +-------------------------------------------------- +{ + "took" : 639, + "deleted": 0, + "batches": 1, + "version_conflicts": 2, + "retries": 0, + "throttled_millis": 0, + "failures" : [ ] +} +-------------------------------------------------- + +`took`:: + +The number of milliseconds from start to end of the whole operation. + +`deleted`:: + +The number of documents that were successfully deleted. + +`batches`:: + +The number of scroll responses pulled back by the delete by query. + +`version_conflicts`:: + +The number of version conflicts that the delete by query hit. + +`retries`:: + +The number of retries that the delete by query did in response to a full queue. + +`throttled_millis`:: + +Number of milliseconds the request slept to conform to `requests_per_second`. + +`failures`:: + +Array of all indexing failures. If this is non-empty then the request aborted +because of those failures. See `conflicts` for how to prevent version conflicts +from aborting the operation. + + +[float] +[[docs-delete-by-query-task-api]] +=== Works with the Task API + +While Delete By Query is running you can fetch its status using the +<>: + +[source,js] +-------------------------------------------------- +GET _tasks?detailed=true&action=*/delete/byquery +-------------------------------------------------- +// CONSOLE + +The response looks like: + +[source,js] +-------------------------------------------------- +{ + "nodes" : { + "r1A2WoRbTwKZ516z6NEs5A" : { + "name" : "Tyrannus", + "transport_address" : "127.0.0.1:9300", + "host" : "127.0.0.1", + "ip" : "127.0.0.1:9300", + "attributes" : { + "testattr" : "test", + "portsfile" : "true" + }, + "tasks" : { + "r1A2WoRbTwKZ516z6NEs5A:36619" : { + "node" : "r1A2WoRbTwKZ516z6NEs5A", + "id" : 36619, + "type" : "transport", + "action" : "indices:data/write/delete/byquery", + "status" : { <1> + "total" : 6154, + "updated" : 0, + "created" : 0, + "deleted" : 3500, + "batches" : 36, + "version_conflicts" : 0, + "noops" : 0, + "retries": 0, + "throttled_millis": 0 + }, + "description" : "" + } + } + } + } +} +-------------------------------------------------- + +<1> This object contains the actual status. It is just like the response JSON +with the important addition of the `total` field. `total` is the total number +of operations that the delete by query expects to perform. You can estimate the +progress by adding the `updated`, `created`, and `deleted` fields. The request +will finish when their sum is equal to the `total` field. + + +[float] +[[docs-delete-by-query-cancel-task-api]] +=== Works with the Cancel Task API + +Any Delete By Query can be canceled using the <>: + +[source,js] +-------------------------------------------------- +POST _tasks/taskid:1/_cancel +-------------------------------------------------- +// CONSOLE + +The `task_id` can be found using the tasks API above. + +Cancellation should happen quickly but might take a few seconds. The task status +API above will continue to list the task until it wakes to cancel itself. + + +[float] +[[docs-delete-by-query-rethrottle]] +=== Rethrottling + +The value of `requests_per_second` can be changed on a running delete by query +using the `_rethrottle` API: + +[source,js] +-------------------------------------------------- +POST _delete_by_query/taskid:1/_rethrottle?requests_per_second=unlimited +-------------------------------------------------- +// CONSOLE + +The `task_id` can be found using the tasks API above.
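(Editor's note, not part of the patch: the progress estimate described in the Task API section above, adding `updated`, `created`, and `deleted` and comparing the sum with `total`, is shown below with the numbers from the example status; fetching and parsing the status JSON is omitted, and the class name is made up for illustration.)

[source,java]
--------------------------------------------------
// Hypothetical progress calculation using the status fields from the example response above.
public final class ProgressSketch {
    public static void main(String[] args) {
        long total = 6154, updated = 0, created = 0, deleted = 3500; // values from the example status
        long done = updated + created + deleted;
        double progress = total == 0 ? 1.0 : (double) done / total;
        // Prints "56.9% done"; the task is finished once done equals total.
        System.out.printf("%.1f%% done%n", progress * 100);
    }
}
--------------------------------------------------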
+ +Just like when setting it on the `_delete_by_query` API `requests_per_second` +can be either `unlimited` to disable throttling or any decimal number like `1.7` +or `12` to throttle to that level. Rethrottling that speeds up the query takes +effect immediately but rethrottling that slows down the query will take effect +after completing the current batch. This prevents scroll timeouts. diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index e5a62ddf33a..ac4da4251be 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -61,7 +61,7 @@ and the time when it attempted to update the document. This is fine because that update will have picked up the online mapping update. Back to the API format, you can limit `_update_by_query` to a single type. This -will only update `tweet`s from the `twitter` index: +will only update `tweet` documents from the `twitter` index: [source,js] -------------------------------------------------- @@ -119,7 +119,7 @@ Just as in <> you can set `ctx.op = "noop"` if your script decides that it doesn't have to make any changes. That will cause `_update_by_query` to omit that document from its updates. Setting `ctx.op` to anything else is an error. If you want to delete by a query you can use the -{plugins}/plugins-delete-by-query.html[Delete by Query plugin] instead. Setting any +<> instead. Setting any other field in `ctx` is an error. Note that we stopped specifying `conflicts=proceed`. In this case we want a diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index f8561d17acd..ee2f5484737 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -75,7 +75,7 @@ import static org.elasticsearch.search.sort.SortBuilders.fieldSort; * Abstract base for scrolling across a search and executing bulk actions on all results. All package private methods are package private so * their tests can use them. Most methods run in the listener thread pool because the are meant to be fast and don't expect to block. */ -public abstract class AbstractAsyncBulkByScrollAction, Response> { +public abstract class AbstractAsyncBulkByScrollAction> { /** * The request for this action. Named mainRequest because we create lots of request variables all representing child * requests of this mainRequest. @@ -92,12 +92,13 @@ public abstract class AbstractAsyncBulkByScrollAction listener; + private final ActionListener listener; private final BackoffPolicy backoffPolicy; private final Retry bulkRetry; public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, ActionListener listener) { + ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, + ActionListener listener) { this.task = task; this.logger = logger; this.client = client; @@ -111,8 +112,13 @@ public abstract class AbstractAsyncBulkByScrollAction docs); - protected abstract Response buildResponse(TimeValue took, List indexingFailures, List searchFailures, - boolean timedOut); + /** + * Build the response for reindex actions.
+ */ + protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, + List searchFailures, boolean timedOut) { + return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); + } /** * Start the action by firing the initial search request. diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java index df9245346b3..0e3f3678851 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java @@ -20,15 +20,16 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; @@ -40,6 +41,7 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; @@ -47,9 +49,9 @@ import org.elasticsearch.search.SearchHitField; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.BiFunction; import static java.util.Collections.emptyMap; @@ -57,91 +59,106 @@ import static java.util.Collections.emptyMap; * Abstract base for scrolling across a search and executing bulk indexes on all * results. */ -public abstract class AbstractAsyncBulkIndexByScrollAction> - extends AbstractAsyncBulkByScrollAction { +public abstract class AbstractAsyncBulkIndexByScrollAction> + extends AbstractAsyncBulkByScrollAction { - private final ScriptService scriptService; - private final CompiledScript script; + protected final ScriptService scriptService; + protected final ClusterState clusterState; - public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ScriptService scriptService, ClusterState state, - ParentTaskAssigningClient client, ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, - ActionListener listener) { + /** + * This BiFunction is used to apply various changes depending of the Reindex action and the search hit, + * from copying search hit metadata (parent, routing, etc) to potentially transforming the + * {@link RequestWrapper} completely. 
+ */ + private final BiFunction, SearchHit, RequestWrapper> scriptApplier; + + public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, + ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, + ActionListener listener, + ScriptService scriptService, ClusterState clusterState) { super(task, logger, client, threadPool, mainRequest, firstSearchRequest, listener); this.scriptService = scriptService; - if (mainRequest.getScript() == null) { - script = null; - } else { - script = scriptService.compile(mainRequest.getScript(), ScriptContext.Standard.UPDATE, emptyMap(), state); - } - } - - @Override - protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures, boolean timedOut) { - return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); + this.clusterState = clusterState; + this.scriptApplier = Objects.requireNonNull(buildScriptApplier(), "script applier must not be null"); } /** - * Build the IndexRequest for a single search hit. This shouldn't handle - * metadata or the script. That will be handled by copyMetadata and - * applyScript functions that can be overridden. + * Build the {@link BiFunction} to apply to all {@link RequestWrapper}. */ - protected abstract IndexRequest buildIndexRequest(SearchHit doc); + protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + // The default script applier executes a no-op + return (request, searchHit) -> request; + } @Override protected BulkRequest buildBulk(Iterable docs) { BulkRequest bulkRequest = new BulkRequest(); - ExecutableScript executableScript = null; - Map scriptCtx = null; - for (SearchHit doc : docs) { - if (doc.hasSource()) { - /* - * Either the document didn't store _source or we didn't fetch it for some reason. Since we don't allow the user to - * change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source. - * Thus the error message assumes that it wasn't stored. - */ - throw new IllegalArgumentException("[" + doc.index() + "][" + doc.type() + "][" + doc.id() + "] didn't store _source"); - } - IndexRequest index = buildIndexRequest(doc); - copyMetadata(index, doc); - if (script != null) { - if (executableScript == null) { - executableScript = scriptService.executable(script, mainRequest.getScript().getParams()); - scriptCtx = new HashMap<>(); - } - if (false == applyScript(index, doc, executableScript, scriptCtx)) { - continue; + if (accept(doc)) { + RequestWrapper request = scriptApplier.apply(copyMetadata(buildRequest(doc), doc), doc); + if (request != null) { + bulkRequest.add(request.self()); } } - bulkRequest.add(index); } - return bulkRequest; } /** - * Copies the metadata from a hit to the index request. + * Used to accept or ignore a search hit. Ignored search hits will be excluded + * from the bulk request. It is also where we fail on invalid search hits, like + * when the document has no source but it's required. 
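+ * <p>
+ * The default implementation rejects hits whose {@code _source} was not returned, because the indexing actions need
+ * it to rebuild the document; delete-by-query overrides this to accept every hit, since a deletion only needs the
+ * hit's index, type, id and version.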
*/ - protected void copyMetadata(IndexRequest index, SearchHit doc) { - index.parent(fieldValue(doc, ParentFieldMapper.NAME)); - copyRouting(index, doc); - // Comes back as a Long but needs to be a string - Long timestamp = fieldValue(doc, TimestampFieldMapper.NAME); - if (timestamp != null) { - index.timestamp(timestamp.toString()); - } - Long ttl = fieldValue(doc, TTLFieldMapper.NAME); - if (ttl != null) { - index.ttl(ttl); + protected boolean accept(SearchHit doc) { + if (doc.hasSource()) { + /* + * Either the document didn't store _source or we didn't fetch it for some reason. Since we don't allow the user to + * change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source. + * Thus the error message assumes that it wasn't stored. + */ + throw new IllegalArgumentException("[" + doc.index() + "][" + doc.type() + "][" + doc.id() + "] didn't store _source"); } + return true; } /** - * Part of copyMetadata but called out individual for easy overwriting. + * Build the {@link RequestWrapper} for a single search hit. This shouldn't handle + * metadata or scripting. That will be handled by copyMetadata and + * apply functions that can be overridden. */ - protected void copyRouting(IndexRequest index, SearchHit doc) { - index.routing(fieldValue(doc, RoutingFieldMapper.NAME)); + protected abstract RequestWrapper buildRequest(SearchHit doc); + + /** + * Copies the metadata from a hit to the request. + */ + protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { + copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); + copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + + // Comes back as a Long but needs to be a string + Long timestamp = fieldValue(doc, TimestampFieldMapper.NAME); + if (timestamp != null) { + request.setTimestamp(timestamp.toString()); + } + Long ttl = fieldValue(doc, TTLFieldMapper.NAME); + if (ttl != null) { + request.setTtl(ttl); + } + return request; + } + + /** + * Copy the parent from a search hit to the request. + */ + protected void copyParent(RequestWrapper request, String parent) { + request.setParent(parent); + } + + /** + * Copy the routing from a search hit to the request. 
+ */ + protected void copyRouting(RequestWrapper request, String routing) { + request.setRouting(routing); } protected T fieldValue(SearchHit doc, String fieldName) { @@ -150,106 +167,327 @@ public abstract class AbstractAsyncBulkIndexByScrollAction ctx) { - if (script == null) { - return true; - } - ctx.put(IndexFieldMapper.NAME, doc.index()); - ctx.put(TypeFieldMapper.NAME, doc.type()); - ctx.put(IdFieldMapper.NAME, doc.id()); - Long oldVersion = doc.getVersion(); - ctx.put(VersionFieldMapper.NAME, oldVersion); - String oldParent = fieldValue(doc, ParentFieldMapper.NAME); - ctx.put(ParentFieldMapper.NAME, oldParent); - String oldRouting = fieldValue(doc, RoutingFieldMapper.NAME); - ctx.put(RoutingFieldMapper.NAME, oldRouting); - Long oldTimestamp = fieldValue(doc, TimestampFieldMapper.NAME); - ctx.put(TimestampFieldMapper.NAME, oldTimestamp); - Long oldTTL = fieldValue(doc, TTLFieldMapper.NAME); - ctx.put(TTLFieldMapper.NAME, oldTTL); - ctx.put(SourceFieldMapper.NAME, index.sourceAsMap()); - ctx.put("op", "update"); - script.setNextVar("ctx", ctx); - script.run(); - Map resultCtx = (Map) script.unwrap(ctx); - String newOp = (String) resultCtx.remove("op"); - if (newOp == null) { - throw new IllegalArgumentException("Script cleared op!"); - } - if ("noop".equals(newOp)) { - task.countNoop(); - return false; - } - if (false == "update".equals(newOp)) { - throw new IllegalArgumentException("Invalid op [" + newOp + ']'); - } + interface RequestWrapper> { - /* - * It'd be lovely to only set the source if we know its been modified - * but it isn't worth keeping two copies of it around just to check! - */ - index.source((Map) resultCtx.remove(SourceFieldMapper.NAME)); + void setIndex(String index); - Object newValue = ctx.remove(IndexFieldMapper.NAME); - if (false == doc.index().equals(newValue)) { - scriptChangedIndex(index, newValue); - } - newValue = ctx.remove(TypeFieldMapper.NAME); - if (false == doc.type().equals(newValue)) { - scriptChangedType(index, newValue); - } - newValue = ctx.remove(IdFieldMapper.NAME); - if (false == doc.id().equals(newValue)) { - scriptChangedId(index, newValue); - } - newValue = ctx.remove(VersionFieldMapper.NAME); - if (false == Objects.equals(oldVersion, newValue)) { - scriptChangedVersion(index, newValue); - } - newValue = ctx.remove(ParentFieldMapper.NAME); - if (false == Objects.equals(oldParent, newValue)) { - scriptChangedParent(index, newValue); - } - /* - * Its important that routing comes after parent in case you want to - * change them both. 
- */ - newValue = ctx.remove(RoutingFieldMapper.NAME); - if (false == Objects.equals(oldRouting, newValue)) { - scriptChangedRouting(index, newValue); - } - newValue = ctx.remove(TimestampFieldMapper.NAME); - if (false == Objects.equals(oldTimestamp, newValue)) { - scriptChangedTimestamp(index, newValue); - } - newValue = ctx.remove(TTLFieldMapper.NAME); - if (false == Objects.equals(oldTTL, newValue)) { - scriptChangedTTL(index, newValue); - } - if (false == ctx.isEmpty()) { - throw new IllegalArgumentException("Invalid fields added to ctx [" + String.join(",", ctx.keySet()) + ']'); - } - return true; + void setType(String type); + + void setId(String id); + + void setVersion(long version); + + void setVersionType(VersionType versionType); + + void setParent(String parent); + + void setRouting(String routing); + + void setTimestamp(String timestamp); + + void setTtl(Long ttl); + + void setSource(Map source); + + Map getSource(); + + Self self(); } - protected abstract void scriptChangedIndex(IndexRequest index, Object to); + /** + * {@link RequestWrapper} for {@link IndexRequest} + */ + public static class IndexRequestWrapper implements RequestWrapper { - protected abstract void scriptChangedType(IndexRequest index, Object to); + private final IndexRequest request; - protected abstract void scriptChangedId(IndexRequest index, Object to); + IndexRequestWrapper(IndexRequest request) { + this.request = Objects.requireNonNull(request, "Wrapped IndexRequest can not be null"); + } - protected abstract void scriptChangedVersion(IndexRequest index, Object to); + @Override + public void setIndex(String index) { + request.index(index); + } - protected abstract void scriptChangedRouting(IndexRequest index, Object to); + @Override + public void setType(String type) { + request.type(type); + } - protected abstract void scriptChangedParent(IndexRequest index, Object to); + @Override + public void setId(String id) { + request.id(id); + } - protected abstract void scriptChangedTimestamp(IndexRequest index, Object to); + @Override + public void setVersion(long version) { + request.version(version); + } - protected abstract void scriptChangedTTL(IndexRequest index, Object to); + @Override + public void setVersionType(VersionType versionType) { + request.versionType(versionType); + } + + @Override + public void setParent(String parent) { + request.parent(parent); + } + + @Override + public void setRouting(String routing) { + request.routing(routing); + } + + @Override + public void setTimestamp(String timestamp) { + request.timestamp(timestamp); + } + + @Override + public void setTtl(Long ttl) { + if (ttl == null) { + request.ttl((TimeValue) null); + } else { + request.ttl(ttl); + } + } + + @Override + public Map getSource() { + return request.sourceAsMap(); + } + + @Override + public void setSource(Map source) { + request.source(source); + } + + @Override + public IndexRequest self() { + return request; + } + } + + /** + * Wraps a {@link IndexRequest} in a {@link RequestWrapper} + */ + static RequestWrapper wrap(IndexRequest request) { + return new IndexRequestWrapper(request); + } + + /** + * {@link RequestWrapper} for {@link DeleteRequest} + */ + public static class DeleteRequestWrapper implements RequestWrapper { + + private final DeleteRequest request; + + DeleteRequestWrapper(DeleteRequest request) { + this.request = Objects.requireNonNull(request, "Wrapped DeleteRequest can not be null"); + } + + @Override + public void setIndex(String index) { + request.index(index); + } + + @Override + public void 
setType(String type) { + request.type(type); + } + + @Override + public void setId(String id) { + request.id(id); + } + + @Override + public void setVersion(long version) { + request.version(version); + } + + @Override + public void setVersionType(VersionType versionType) { + request.versionType(versionType); + } + + @Override + public void setParent(String parent) { + request.parent(parent); + } + + @Override + public void setRouting(String routing) { + request.routing(routing); + } + + @Override + public void setTimestamp(String timestamp) { + throw new UnsupportedOperationException("unable to set [timestamp] on action request [" + request.getClass() + "]"); + } + + @Override + public void setTtl(Long ttl) { + throw new UnsupportedOperationException("unable to set [ttl] on action request [" + request.getClass() + "]"); + } + + @Override + public Map getSource() { + throw new UnsupportedOperationException("unable to get source from action request [" + request.getClass() + "]"); + } + + @Override + public void setSource(Map source) { + throw new UnsupportedOperationException("unable to set [source] on action request [" + request.getClass() + "]"); + } + + @Override + public DeleteRequest self() { + return request; + } + } + + /** + * Wraps a {@link DeleteRequest} in a {@link RequestWrapper} + */ + static RequestWrapper wrap(DeleteRequest request) { + return new DeleteRequestWrapper(request); + } + + /** + * Apply a {@link Script} to a {@link RequestWrapper} + */ + public abstract class ScriptApplier implements BiFunction, SearchHit, RequestWrapper> { + + private final BulkByScrollTask task; + private final ScriptService scriptService; + private final ClusterState state; + private final Script script; + private final Map params; + + private ExecutableScript executable; + private Map context; + + public ScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, ClusterState state, + Map params) { + this.task = task; + this.scriptService = scriptService; + this.script = script; + this.state = state; + this.params = params; + } + + @Override + @SuppressWarnings("unchecked") + public RequestWrapper apply(RequestWrapper request, SearchHit doc) { + if (script == null) { + return request; + } + if (executable == null) { + CompiledScript compiled = scriptService.compile(script, ScriptContext.Standard.UPDATE, emptyMap(), state); + executable = scriptService.executable(compiled, params); + } + if (context == null) { + context = new HashMap<>(); + } + + context.put(IndexFieldMapper.NAME, doc.index()); + context.put(TypeFieldMapper.NAME, doc.type()); + context.put(IdFieldMapper.NAME, doc.id()); + Long oldVersion = doc.getVersion(); + context.put(VersionFieldMapper.NAME, oldVersion); + String oldParent = fieldValue(doc, ParentFieldMapper.NAME); + context.put(ParentFieldMapper.NAME, oldParent); + String oldRouting = fieldValue(doc, RoutingFieldMapper.NAME); + context.put(RoutingFieldMapper.NAME, oldRouting); + Long oldTimestamp = fieldValue(doc, TimestampFieldMapper.NAME); + context.put(TimestampFieldMapper.NAME, oldTimestamp); + Long oldTTL = fieldValue(doc, TTLFieldMapper.NAME); + context.put(TTLFieldMapper.NAME, oldTTL); + context.put(SourceFieldMapper.NAME, request.getSource()); + context.put("op", "update"); + executable.setNextVar("ctx", context); + executable.run(); + + Map resultCtx = (Map) executable.unwrap(context); + String newOp = (String) resultCtx.remove("op"); + if (newOp == null) { + throw new IllegalArgumentException("Script cleared op!"); + } + if 
("noop".equals(newOp)) { + task.countNoop(); + return null; + } + if (false == "update".equals(newOp)) { + throw new IllegalArgumentException("Invalid op [" + newOp + ']'); + } + + /* + * It'd be lovely to only set the source if we know its been modified + * but it isn't worth keeping two copies of it around just to check! + */ + request.setSource((Map) resultCtx.remove(SourceFieldMapper.NAME)); + + Object newValue = context.remove(IndexFieldMapper.NAME); + if (false == doc.index().equals(newValue)) { + scriptChangedIndex(request, newValue); + } + newValue = context.remove(TypeFieldMapper.NAME); + if (false == doc.type().equals(newValue)) { + scriptChangedType(request, newValue); + } + newValue = context.remove(IdFieldMapper.NAME); + if (false == doc.id().equals(newValue)) { + scriptChangedId(request, newValue); + } + newValue = context.remove(VersionFieldMapper.NAME); + if (false == Objects.equals(oldVersion, newValue)) { + scriptChangedVersion(request, newValue); + } + newValue = context.remove(ParentFieldMapper.NAME); + if (false == Objects.equals(oldParent, newValue)) { + scriptChangedParent(request, newValue); + } + /* + * Its important that routing comes after parent in case you want to + * change them both. + */ + newValue = context.remove(RoutingFieldMapper.NAME); + if (false == Objects.equals(oldRouting, newValue)) { + scriptChangedRouting(request, newValue); + } + newValue = context.remove(TimestampFieldMapper.NAME); + if (false == Objects.equals(oldTimestamp, newValue)) { + scriptChangedTimestamp(request, newValue); + } + newValue = context.remove(TTLFieldMapper.NAME); + if (false == Objects.equals(oldTTL, newValue)) { + scriptChangedTTL(request, newValue); + } + if (false == context.isEmpty()) { + throw new IllegalArgumentException("Invalid fields added to context [" + String.join(",", context.keySet()) + ']'); + } + return request; + } + + protected abstract void scriptChangedIndex(RequestWrapper request, Object to); + + protected abstract void scriptChangedType(RequestWrapper request, Object to); + + protected abstract void scriptChangedId(RequestWrapper request, Object to); + + protected abstract void scriptChangedVersion(RequestWrapper request, Object to); + + protected abstract void scriptChangedRouting(RequestWrapper request, Object to); + + protected abstract void scriptChangedParent(RequestWrapper request, Object to); + + protected abstract void scriptChangedTimestamp(RequestWrapper request, Object to); + + protected abstract void scriptChangedTTL(RequestWrapper request, Object to); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index d98735d3fb4..e78a6a9c350 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; @@ -45,32 +46,6 @@ public abstract class AbstractBaseReindexRestHandler< TA extends TransportAction > extends BaseRestHandler { - /** - * @return requests_per_second from the request as a float if it was on 
the request, null otherwise - */ - public static Float parseRequestsPerSecond(RestRequest request) { - String requestsPerSecondString = request.param("requests_per_second"); - if (requestsPerSecondString == null) { - return null; - } - if ("unlimited".equals(requestsPerSecondString)) { - return Float.POSITIVE_INFINITY; - } - float requestsPerSecond; - try { - requestsPerSecond = Float.parseFloat(requestsPerSecondString); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling.", e); - } - if (requestsPerSecond <= 0) { - // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of "unlimited" - throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling."); - } - return requestsPerSecond; - } - protected final IndicesQueriesRegistry indicesQueriesRegistry; protected final AggregatorParsers aggParsers; protected final Suggesters suggesters; @@ -88,41 +63,95 @@ public abstract class AbstractBaseReindexRestHandler< this.action = action; } - protected void execute(RestRequest request, Request internalRequest, RestChannel channel, - boolean includeCreated, boolean includeUpdated, boolean includeDeleted) throws IOException { - Float requestsPerSecond = parseRequestsPerSecond(request); - if (requestsPerSecond != null) { - internalRequest.setRequestsPerSecond(requestsPerSecond); - } + protected void handleRequest(RestRequest request, RestChannel channel, + boolean includeCreated, boolean includeUpdated, boolean includeDeleted) throws IOException { + // Build the internal request + Request internal = setCommonOptions(request, buildRequest(request)); + + // Executes the request and waits for completion if (request.paramAsBoolean("wait_for_completion", true)) { Map params = new HashMap<>(); params.put(BulkByScrollTask.Status.INCLUDE_CREATED, Boolean.toString(includeCreated)); params.put(BulkByScrollTask.Status.INCLUDE_UPDATED, Boolean.toString(includeUpdated)); params.put(BulkByScrollTask.Status.INCLUDE_DELETED, Boolean.toString(includeDeleted)); - action.execute(internalRequest, new BulkIndexByScrollResponseContentListener<>(channel, params)); + action.execute(internal, new BulkIndexByScrollResponseContentListener<>(channel, params)); return; } + /* * Lets try and validate before forking so the user gets some error. The * task can't totally validate until it starts but this is better than * nothing. */ - ActionRequestValidationException validationException = internalRequest.validate(); + ActionRequestValidationException validationException = internal.validate(); if (validationException != null) { channel.sendResponse(new BytesRestResponse(channel, validationException)); return; } - Task task = action.execute(internalRequest, LoggingTaskListener.instance()); - sendTask(channel, task); + sendTask(channel, action.execute(internal, LoggingTaskListener.instance())); + } + + /** + * Build the Request based on the RestRequest. + */ + protected abstract Request buildRequest(RestRequest request) throws IOException; + + /** + * Sets common options of {@link AbstractBulkByScrollRequest} requests. 
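+ * Currently this covers the {@code refresh}, {@code timeout}, {@code consistency} and {@code requests_per_second}
+ * parameters; request specific parameters (like {@code conflicts} or {@code script}) stay in the concrete handlers.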
+ */ + protected Request setCommonOptions(RestRequest restRequest, Request request) { + assert restRequest != null : "RestRequest should not be null"; + assert request != null : "Request should not be null"; + + request.setRefresh(restRequest.paramAsBoolean("refresh", request.isRefresh())); + request.setTimeout(restRequest.paramAsTime("timeout", request.getTimeout())); + + String consistency = restRequest.param("consistency"); + if (consistency != null) { + request.setConsistency(WriteConsistencyLevel.fromString(consistency)); + } + + Float requestsPerSecond = parseRequestsPerSecond(restRequest); + if (requestsPerSecond != null) { + request.setRequestsPerSecond(requestsPerSecond); + } + return request; } private void sendTask(RestChannel channel, Task task) throws IOException { - XContentBuilder builder = channel.newBuilder(); - builder.startObject(); - builder.field("task", clusterService.localNode().getId() + ":" + task.getId()); - builder.endObject(); - channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + try (XContentBuilder builder = channel.newBuilder()) { + builder.startObject(); + builder.field("task", clusterService.localNode().getId() + ":" + task.getId()); + builder.endObject(); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } + } + + /** + * @return requests_per_second from the request as a float if it was on the request, null otherwise + */ + public static Float parseRequestsPerSecond(RestRequest request) { + String requestsPerSecondString = request.param("requests_per_second"); + if (requestsPerSecondString == null) { + return null; + } + if ("unlimited".equals(requestsPerSecondString)) { + return Float.POSITIVE_INFINITY; + } + float requestsPerSecond; + try { + requestsPerSecond = Float.parseFloat(requestsPerSecondString); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling.", e); + } + if (requestsPerSecond <= 0) { + // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of "unlimited" + throw new IllegalArgumentException( + "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling."); + } + return requestsPerSecond; } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java new file mode 100644 index 00000000000..926da3befdd --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.rest.action.support.RestActions; +import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.suggest.Suggesters; + +import java.io.IOException; +import java.util.Map; +import java.util.function.Consumer; + +import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES; + +/** + * Rest handler for reindex actions that accepts a search request like Update-By-Query or Delete-By-Query + */ +public abstract class AbstractBulkByQueryRestHandler< + Request extends AbstractBulkByScrollRequest, + TA extends TransportAction> extends AbstractBaseReindexRestHandler { + + protected AbstractBulkByQueryRestHandler(Settings settings, Client client, IndicesQueriesRegistry indicesQueriesRegistry, + AggregatorParsers aggParsers, Suggesters suggesters, ClusterService clusterService, + TA action) { + super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + } + + protected void parseInternalRequest(Request internal, RestRequest restRequest, + Map> consumers) throws IOException { + assert internal != null : "Request should not be null"; + assert restRequest != null : "RestRequest should not be null"; + + SearchRequest searchRequest = internal.getSearchRequest(); + int scrollSize = searchRequest.source().size(); + searchRequest.source().size(SIZE_ALL_MATCHES); + + parseSearchRequest(searchRequest, restRequest, consumers); + + internal.setSize(searchRequest.source().size()); + searchRequest.source().size(restRequest.paramAsInt("scroll_size", scrollSize)); + + String conflicts = restRequest.param("conflicts"); + if (conflicts != null) { + internal.setConflicts(conflicts); + } + + // Let the requester set search timeout. It is probably only going to be useful for testing but who knows. + if (restRequest.hasParam("search_timeout")) { + searchRequest.source().timeout(restRequest.paramAsTime("search_timeout", null)); + } + } + + protected void parseSearchRequest(SearchRequest searchRequest, RestRequest restRequest, + Map> consumers) throws IOException { + assert searchRequest != null : "SearchRequest should not be null"; + assert restRequest != null : "RestRequest should not be null"; + + /* + * We can't send parseSearchRequest REST content that it doesn't support + * so we will have to remove the content that is valid in addition to + * what it supports from the content first. This is a temporary hack and + * should get better when SearchRequest has full ObjectParser support + * then we can delegate and stuff. + */ + BytesReference content = RestActions.hasBodyContent(restRequest) ? 
RestActions.getRestContent(restRequest) : null; + if ((content != null) && (consumers != null && consumers.size() > 0)) { + Tuple> body = XContentHelper.convertToMap(content, false); + boolean modified = false; + for (Map.Entry> consumer : consumers.entrySet()) { + Object value = body.v2().remove(consumer.getKey()); + if (value != null) { + consumer.getValue().accept(value); + modified = true; + } + } + + if (modified) { + try (XContentBuilder builder = XContentFactory.contentBuilder(body.v1())) { + content = builder.map(body.v2()).bytes(); + } + } + } + + RestSearchAction.parseSearchRequest(searchRequest, indicesQueriesRegistry, restRequest, parseFieldMatcher, aggParsers, + suggesters, content); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java new file mode 100644 index 00000000000..c789e9c77b4 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class DeleteByQueryAction extends Action { + + public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction(); + public static final String NAME = "indices:data/write/delete/byquery"; + + private DeleteByQueryAction() { + super(NAME); + } + + @Override + public DeleteByQueryRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new DeleteByQueryRequestBuilder(client, this); + } + + @Override + public BulkIndexByScrollResponse newResponse() { + return new BulkIndexByScrollResponse(); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java new file mode 100644 index 00000000000..327459bd339 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.search.SearchRequest; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Creates a new {@link DeleteByQueryRequest} that uses scrolling and bulk requests to delete all documents matching + * the query. This can have performance as well as visibility implications. + * + * Delete-by-query now has the following semantics: + *
+ * <ul>
+ * <li>it's non-atomic, a delete-by-query may fail at any time while some documents matching the query have already been
+ * deleted</li>
+ * <li>it's syntactic sugar, a delete-by-query is equivalent to a scroll search and corresponding bulk-deletes by ID</li>
+ * <li>it's executed on a point-in-time snapshot, a delete-by-query will only delete the documents that are visible at the
+ * point in time the delete-by-query was started, equivalent to the scroll API</li>
+ * <li>it's consistent, a delete-by-query will yield consistent results across all replicas of a shard</li>
+ * <li>it's forward-compatible, a delete-by-query will only send IDs to the shards as deletes such that no queries are
+ * stored in the transaction logs that might not be supported in the future.</li>
+ * <li>its results won't be visible until the index is refreshed.</li>
+ * </ul>
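+ *
+ * A minimal usage sketch (illustrative only; the index name and query below are placeholders, not part of this change):
+ * <pre>
+ * DeleteByQueryRequest request = new DeleteByQueryRequest(new SearchRequest("twitter"));
+ * request.getSearchRequest().source().query(QueryBuilders.termQuery("user", "kimchy"));
+ * </pre>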
+ */ +public class DeleteByQueryRequest extends AbstractBulkByScrollRequest { + + public DeleteByQueryRequest() { + } + + public DeleteByQueryRequest(SearchRequest search) { + super(search); + // Delete-By-Query does not require the source + search.source().fetchSource(false); + } + + @Override + protected DeleteByQueryRequest self() { + return this; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException e = super.validate(); + if (getSearchRequest().indices() == null || getSearchRequest().indices().length == 0) { + e = addValidationError("use _all if you really want to delete from all existing indexes", e); + } + if (getSearchRequest() == null || getSearchRequest().source() == null) { + e = addValidationError("source is missing", e); + } + return e; + } + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + b.append("delete-by-query "); + searchToString(b); + return b.toString(); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java new file mode 100644 index 00000000000..f4d8a91f4cb --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; + +public class DeleteByQueryRequestBuilder extends + AbstractBulkByScrollRequestBuilder { + + public DeleteByQueryRequestBuilder(ElasticsearchClient client, + Action action) { + this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE)); + } + + private DeleteByQueryRequestBuilder(ElasticsearchClient client, + Action action, + SearchRequestBuilder search) { + super(client, action, search, new DeleteByQueryRequest(search.request())); + } + + @Override + protected DeleteByQueryRequestBuilder self() { + return this; + } + + @Override + public DeleteByQueryRequestBuilder abortOnVersionConflict(boolean abortOnVersionConflict) { + request.setAbortOnVersionConflict(abortOnVersionConflict); + return this; + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index 6e42f56dece..e3a826dbdad 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -39,12 +39,14 @@ public class ReindexPlugin extends Plugin { public void onModule(ActionModule actionModule) { actionModule.registerAction(ReindexAction.INSTANCE, TransportReindexAction.class); actionModule.registerAction(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class); + actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class); actionModule.registerAction(RethrottleAction.INSTANCE, TransportRethrottleAction.class); } public void onModule(NetworkModule networkModule) { networkModule.registerRestHandler(RestReindexAction.class); networkModule.registerRestHandler(RestUpdateByQueryAction.class); + networkModule.registerRestHandler(RestDeleteByQueryAction.class); networkModule.registerRestHandler(RestRethrottleAction.class); networkModule.registerTaskStatus(BulkByScrollTask.Status.NAME, BulkByScrollTask.Status::new); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java new file mode 100644 index 00000000000..4750fe22313 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.suggest.Suggesters; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Consumer; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { + + @Inject + public RestDeleteByQueryAction(Settings settings, RestController controller, Client client, + IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, + ClusterService clusterService, TransportDeleteByQueryAction action) { + super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + controller.registerHandler(POST, "/{index}/_delete_by_query", this); + controller.registerHandler(POST, "/{index}/{type}/_delete_by_query", this); + } + + @Override + protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { + if (false == request.hasContent()) { + throw new ElasticsearchException("_delete_by_query requires a request body"); + } + handleRequest(request, channel, false, false, true); + } + + @Override + protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOException { + /* + * Passing the search request through DeleteByQueryRequest first allows + * it to set its own defaults which differ from SearchRequest's + * defaults. Then the parseInternalRequest can override them. 
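+ * Only the "conflicts" body parameter is consumed here; everything else in the body is parsed as a regular search
+ * request.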
+ */ + DeleteByQueryRequest internal = new DeleteByQueryRequest(new SearchRequest()); + + Map> consumers = new HashMap<>(); + consumers.put("conflicts", o -> internal.setConflicts((String) o)); + + parseInternalRequest(internal, request, consumers); + + return internal; + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index d61980fb8ce..267994672d4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.WriteConsistencyLevel; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcherSupplier; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -39,7 +38,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -53,13 +51,14 @@ import java.util.Map; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; /** * Expose IndexBySearchRequest over rest. 
*/ public class RestReindexAction extends AbstractBaseReindexRestHandler { + private static final ObjectParser PARSER = new ObjectParser<>("reindex"); + static { ObjectParser.Parser sourceParser = (parser, search, context) -> { /* @@ -114,41 +113,18 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler internalRequest, RestRequest request) { - internalRequest.setRefresh(request.paramAsBoolean("refresh", internalRequest.isRefresh())); - internalRequest.setTimeout(request.paramAsTime("timeout", internalRequest.getTimeout())); - String consistency = request.param("consistency"); - if (consistency != null) { - internalRequest.setConsistency(WriteConsistencyLevel.fromString(consistency)); + PARSER.parse(xcontent, internal, new ReindexParseContext(indicesQueriesRegistry, aggParsers, suggesters, parseFieldMatcher)); } + return internal; } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java index a7c29d40a7d..382f5b51726 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java @@ -39,6 +39,7 @@ public class RestRethrottleAction extends BaseRestHandler { super(settings, client); this.action = action; controller.registerHandler(POST, "/_update_by_query/{taskId}/_rethrottle", this); + controller.registerHandler(POST, "/_delete_by_query/{taskId}/_rethrottle", this); controller.registerHandler(POST, "/_reindex/{taskId}/_rethrottle", this); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 9f76be3f0ff..f7dbbf893a8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -22,31 +22,24 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; +import java.io.IOException; +import java.util.HashMap; import java.util.Map; +import java.util.function.Consumer; -import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES; -import static org.elasticsearch.index.reindex.RestReindexAction.parseCommon; import static org.elasticsearch.rest.RestRequest.Method.POST; -public 
class RestUpdateByQueryAction extends AbstractBaseReindexRestHandler { +public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { @Inject public RestUpdateByQueryAction(Settings settings, RestController controller, Client client, @@ -59,60 +52,26 @@ public class RestUpdateByQueryAction extends AbstractBaseReindexRestHandler> body = XContentHelper.convertToMap(bodyContent, false); - boolean modified = false; - String conflicts = (String) body.v2().remove("conflicts"); - if (conflicts != null) { - internalRequest.setConflicts(conflicts); - modified = true; - } - @SuppressWarnings("unchecked") - Map script = (Map) body.v2().remove("script"); - if (script != null) { - internalRequest.setScript(Script.parse(script, false, parseFieldMatcher)); - modified = true; - } - if (modified) { - XContentBuilder builder = XContentFactory.contentBuilder(body.v1()); - builder.map(body.v2()); - bodyContent = builder.bytes(); - } - } - RestSearchAction.parseSearchRequest(internalRequest.getSearchRequest(), indicesQueriesRegistry, request, - parseFieldMatcher, aggParsers, suggesters, bodyContent); + UpdateByQueryRequest internal = new UpdateByQueryRequest(new SearchRequest()); - String conflicts = request.param("conflicts"); - if (conflicts != null) { - internalRequest.setConflicts(conflicts); - } - parseCommon(internalRequest, request); + Map> consumers = new HashMap<>(); + consumers.put("conflicts", o -> internal.setConflicts((String) o)); + consumers.put("script", o -> internal.setScript(Script.parse((Map)o, false, parseFieldMatcher))); - internalRequest.setSize(internalRequest.getSearchRequest().source().size()); - internalRequest.setPipeline(request.param("pipeline")); - internalRequest.getSearchRequest().source().size(request.paramAsInt("scroll_size", scrollSize)); - // Let the requester set search timeout. It is probably only going to be useful for testing but who knows. - if (request.hasParam("search_timeout")) { - internalRequest.getSearchRequest().source().timeout(request.paramAsTime("search_timeout", null)); - } + parseInternalRequest(internal, request, consumers); - execute(request, internalRequest, channel, false, true, false); + internal.setPipeline(request.param("pipeline")); + return internal; } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java new file mode 100644 index 00000000000..471bd066f94 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +public class TransportDeleteByQueryAction extends HandledTransportAction { + private final Client client; + private final ScriptService scriptService; + private final ClusterService clusterService; + + @Inject + public TransportDeleteByQueryAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver resolver, Client client, TransportService transportService, + ScriptService scriptService, ClusterService clusterService) { + super(settings, DeleteByQueryAction.NAME, threadPool, transportService, actionFilters, resolver, DeleteByQueryRequest::new); + this.client = client; + this.scriptService = scriptService; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, DeleteByQueryRequest request, ActionListener listener) { + ClusterState state = clusterService.state(); + ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task); + new AsyncDeleteBySearchAction((BulkByScrollTask) task, logger, client, threadPool, request, listener, scriptService, state).start(); + } + + @Override + protected void doExecute(DeleteByQueryRequest request, ActionListener listener) { + throw new UnsupportedOperationException("task required"); + } + + /** + * Implementation of delete-by-query using scrolling and bulk. 
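+ * Each scroll hit is turned into a {@link DeleteRequest} that carries only the hit's index, type, id and version, so
+ * the document source is never fetched or sent to the shards.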
+ */ + static class AsyncDeleteBySearchAction extends AbstractAsyncBulkIndexByScrollAction { + + public AsyncDeleteBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool, + DeleteByQueryRequest request, ActionListener listener, + ScriptService scriptService, ClusterState clusterState) { + super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + } + + @Override + protected boolean accept(SearchHit doc) { + // Delete-by-query does not require the source to delete a document + // and the default implementation checks for it + return true; + } + + @Override + protected RequestWrapper buildRequest(SearchHit doc) { + DeleteRequest delete = new DeleteRequest(); + delete.index(doc.index()); + delete.type(doc.type()); + delete.id(doc.id()); + delete.version(doc.version()); + return wrap(delete); + } + + /** + * Overrides the parent {@link AbstractAsyncBulkIndexByScrollAction#copyMetadata(RequestWrapper, SearchHit)} + * method that is much more Update/Reindex oriented and so also copies things like timestamp/ttl which we + * don't care for a deletion. + */ + @Override + protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { + copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); + copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + return request; + } + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 0f07cc560c8..a49ba0a3b32 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -35,16 +35,18 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.Map; import java.util.Objects; +import java.util.function.BiFunction; import static java.util.Objects.requireNonNull; import static org.elasticsearch.index.VersionType.INTERNAL; @@ -72,7 +74,7 @@ public class TransportReindexAction extends HandledTransportAction { - public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ScriptService scriptService, - ParentTaskAssigningClient client, ClusterState state, ThreadPool threadPool, ReindexRequest request, - ActionListener listener) { - super(task, logger, scriptService, state, client, threadPool, request, request.getSearchRequest(), listener); + + public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool, + ReindexRequest request, ActionListener listener, + ScriptService scriptService, ClusterState clusterState) { + super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); } @Override - protected 
IndexRequest buildIndexRequest(SearchHit doc) { + protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + Script script = mainRequest.getScript(); + if (script != null) { + return new ReindexScriptApplier(task, scriptService, script, clusterState, script.getParams()); + } + return super.buildScriptApplier(); + } + + @Override + protected RequestWrapper buildRequest(SearchHit doc) { IndexRequest index = new IndexRequest(); // Copy the index from the request so we always write where it asked to write @@ -161,109 +174,120 @@ public class TransportReindexAction extends HandledTransportAction request, String routing) { String routingSpec = mainRequest.getDestination().routing(); if (routingSpec == null) { - super.copyRouting(index, doc); + super.copyRouting(request, routing); return; } if (routingSpec.startsWith("=")) { - index.routing(mainRequest.getDestination().routing().substring(1)); + super.copyRouting(request, mainRequest.getDestination().routing().substring(1)); return; } switch (routingSpec) { case "keep": - super.copyRouting(index, doc); + super.copyRouting(request, routing); break; case "discard": - index.routing(null); + super.copyRouting(request, null); break; default: throw new IllegalArgumentException("Unsupported routing command"); } } - /* - * Methods below here handle script updating the index request. They try - * to be pretty liberal with regards to types because script are often - * dynamically typed. - */ - @Override - protected void scriptChangedIndex(IndexRequest index, Object to) { - requireNonNull(to, "Can't reindex without a destination index!"); - index.index(to.toString()); - } + class ReindexScriptApplier extends ScriptApplier { - @Override - protected void scriptChangedType(IndexRequest index, Object to) { - requireNonNull(to, "Can't reindex without a destination type!"); - index.type(to.toString()); - } - - @Override - protected void scriptChangedId(IndexRequest index, Object to) { - index.id(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedVersion(IndexRequest index, Object to) { - if (to == null) { - index.version(Versions.MATCH_ANY).versionType(INTERNAL); - return; + ReindexScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, ClusterState state, + Map params) { + super(task, scriptService, script, state, params); } - index.version(asLong(to, VersionFieldMapper.NAME)); - } - @Override - protected void scriptChangedParent(IndexRequest index, Object to) { - // Have to override routing with parent just in case its changed - String routing = Objects.toString(to, null); - index.parent(routing).routing(routing); - } - - @Override - protected void scriptChangedRouting(IndexRequest index, Object to) { - index.routing(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedTimestamp(IndexRequest index, Object to) { - index.timestamp(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedTTL(IndexRequest index, Object to) { - if (to == null) { - index.ttl((TimeValue) null); - return; - } - index.ttl(asLong(to, TTLFieldMapper.NAME)); - } - - private long asLong(Object from, String name) { /* - * Stuffing a number into the map will have converted it to - * some Number. + * Methods below here handle script updating the index request. They try + * to be pretty liberal with regards to types because script are often + * dynamically typed. 
*/ - Number fromNumber; - try { - fromNumber = (Number) from; - } catch (ClassCastException e) { - throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]", e); + + @Override + protected void scriptChangedIndex(RequestWrapper request, Object to) { + requireNonNull(to, "Can't reindex without a destination index!"); + request.setIndex(to.toString()); } - long l = fromNumber.longValue(); - // Check that we didn't round when we fetched the value. - if (fromNumber.doubleValue() != l) { - throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]"); + + @Override + protected void scriptChangedType(RequestWrapper request, Object to) { + requireNonNull(to, "Can't reindex without a destination type!"); + request.setType(to.toString()); + } + + @Override + protected void scriptChangedId(RequestWrapper request, Object to) { + request.setId(Objects.toString(to, null)); + } + + @Override + protected void scriptChangedVersion(RequestWrapper request, Object to) { + if (to == null) { + request.setVersion(Versions.MATCH_ANY); + request.setVersionType(INTERNAL); + } else { + request.setVersion(asLong(to, VersionFieldMapper.NAME)); + } + } + + @Override + protected void scriptChangedParent(RequestWrapper request, Object to) { + // Have to override routing with parent just in case its changed + String routing = Objects.toString(to, null); + request.setParent(routing); + request.setRouting(routing); + } + + @Override + protected void scriptChangedRouting(RequestWrapper request, Object to) { + request.setRouting(Objects.toString(to, null)); + } + + @Override + protected void scriptChangedTimestamp(RequestWrapper request, Object to) { + request.setTimestamp(Objects.toString(to, null)); + } + + @Override + protected void scriptChangedTTL(RequestWrapper request, Object to) { + if (to == null) { + request.setTtl(null); + } else { + request.setTtl(asLong(to, TTLFieldMapper.NAME)); + } + } + + private long asLong(Object from, String name) { + /* + * Stuffing a number into the map will have converted it to + * some Number. + * */ + Number fromNumber; + try { + fromNumber = (Number) from; + } catch (ClassCastException e) { + throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]", e); + } + long l = fromNumber.longValue(); + // Check that we didn't round when we fetched the value. 
+ if (fromNumber.doubleValue() != l) { + throw new IllegalArgumentException(name + " may only be set to an int or a long but was [" + from + "]"); + } + return l; } - return l; } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 4d702be4e9d..7459972ce64 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -39,12 +39,16 @@ import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.Map; +import java.util.function.BiFunction; + public class TransportUpdateByQueryAction extends HandledTransportAction { private final Client client; private final ScriptService scriptService; @@ -65,8 +69,7 @@ public class TransportUpdateByQueryAction extends HandledTransportAction listener) { ClusterState state = clusterService.state(); ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task); - new AsyncIndexBySearchAction((BulkByScrollTask) task, logger, scriptService, client, threadPool, state, request, listener) - .start(); + new AsyncIndexBySearchAction((BulkByScrollTask) task, logger, client, threadPool, request, listener, scriptService, state).start(); } @Override @@ -78,14 +81,24 @@ public class TransportUpdateByQueryAction extends HandledTransportAction { - public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ScriptService scriptService, - ParentTaskAssigningClient client, ThreadPool threadPool, ClusterState clusterState, UpdateByQueryRequest request, - ActionListener listener) { - super(task, logger, scriptService, clusterState, client, threadPool, request, request.getSearchRequest(), listener); + + public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool, + UpdateByQueryRequest request, ActionListener listener, + ScriptService scriptService, ClusterState clusterState) { + super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); } @Override - protected IndexRequest buildIndexRequest(SearchHit doc) { + protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + Script script = mainRequest.getScript(); + if (script != null) { + return new UpdateByQueryScriptApplier(task, scriptService, script, clusterState, script.getParams()); + } + return super.buildScriptApplier(); + } + + @Override + protected RequestWrapper buildRequest(SearchHit doc) { IndexRequest index = new IndexRequest(); index.index(doc.index()); index.type(doc.type()); @@ -94,47 +107,55 @@ public class TransportUpdateByQueryAction extends HandledTransportAction params) { + super(task, scriptService, script, state, params); + } - @Override - protected void scriptChangedId(IndexRequest index, Object to) { - throw new 
IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedIndex(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + IndexFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedVersion(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [_version] not allowed"); - } + @Override + protected void scriptChangedType(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + TypeFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedRouting(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + RoutingFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedId(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedParent(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + ParentFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedVersion(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [_version] not allowed"); + } - @Override - protected void scriptChangedTimestamp(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + TimestampFieldMapper.NAME + "] not allowed"); - } + @Override + protected void scriptChangedRouting(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + RoutingFieldMapper.NAME + "] not allowed"); + } - @Override - protected void scriptChangedTTL(IndexRequest index, Object to) { - throw new IllegalArgumentException("Modifying [" + TTLFieldMapper.NAME + "] not allowed"); + @Override + protected void scriptChangedParent(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + ParentFieldMapper.NAME + "] not allowed"); + } + + @Override + protected void scriptChangedTimestamp(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + TimestampFieldMapper.NAME + "] not allowed"); + } + + @Override + protected void scriptChangedTTL(RequestWrapper request, Object to) { + throw new IllegalArgumentException("Modifying [" + TTLFieldMapper.NAME + "] not allowed"); + } } } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java index b8f389d171a..27a8a42d5e6 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java @@ -22,10 +22,15 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.Index; +import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchHit; +import org.junit.Before; +import 
org.mockito.Matchers; import java.util.HashMap; import java.util.Map; @@ -33,18 +38,35 @@ import java.util.function.Consumer; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< Request extends AbstractBulkIndexByScrollRequest, Response extends BulkIndexByScrollResponse> extends AbstractAsyncBulkIndexByScrollActionTestCase { + + private static final Script EMPTY_SCRIPT = new Script(""); + + protected ScriptService scriptService; + + @Before + public void setupScriptService() { + scriptService = mock(ScriptService.class); + } + protected IndexRequest applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); Map fields = new HashMap<>(); InternalSearchHit doc = new InternalSearchHit(0, "id", new Text("type"), fields); doc.shardTarget(new SearchShardTarget("nodeid", new Index("index", "uuid"), 1)); - ExecutableScript script = new SimpleExecutableScript(scriptBody); - action().applyScript(index, doc, script, new HashMap<>()); + ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); + + when(scriptService.executable(any(CompiledScript.class), Matchers.>any())) + .thenReturn(executableScript); + AbstractAsyncBulkIndexByScrollAction action = action(scriptService, request().setScript(EMPTY_SCRIPT)); + action.buildScriptApplier().apply(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc); return index; } @@ -53,7 +75,7 @@ public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< applyScript((Map ctx) -> ctx.put("junk", "junk")); fail("Expected error"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Invalid fields added to ctx [junk]")); + assertThat(e.getMessage(), equalTo("Invalid fields added to context [junk]")); } } @@ -65,4 +87,6 @@ public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< }); assertEquals("cat", index.sourceAsMap().get("bar")); } + + protected abstract AbstractAsyncBulkIndexByScrollAction action(ScriptService scriptService, Request request); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java index ab5f8f0d748..b9489e9f5d9 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionTestCase.java @@ -46,8 +46,6 @@ public abstract class AbstractAsyncBulkIndexByScrollActionTestCase< threadPool.shutdown(); } - protected abstract AbstractAsyncBulkIndexByScrollAction action(); - protected abstract Request request(); protected PlainActionFuture listener() { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java index 37386abf12e..5a9976fc005 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java @@ -49,13 +49,15 @@ public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< public void testTimestampIsCopied() { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(TimestampFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TimestampFieldMapper.NAME, 10L)); assertEquals("10", index.timestamp()); } public void testTTL() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(TTLFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TTLFieldMapper.NAME, 10L)); assertEquals(timeValueMillis(10), index.ttl()); } + + protected abstract AbstractAsyncBulkIndexByScrollAction action(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index c9bd22d3552..d78fcfe69ca 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -665,7 +665,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { } private class DummyAbstractAsyncBulkByScrollAction - extends AbstractAsyncBulkByScrollAction { + extends AbstractAsyncBulkByScrollAction { public DummyAbstractAsyncBulkByScrollAction() { super(testTask, logger, new ParentTaskAssigningClient(client, localNode, testTask), threadPool, testRequest, firstSearchRequest, listener); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java index 5bce3260929..4ef16c59141 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java @@ -30,6 +30,8 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher createdMatcher = equalTo(0L); private Matcher updatedMatcher = equalTo(0L); + private Matcher deletedMatcher = equalTo(0L); + /** * Matches for number of batches. Optional. */ @@ -56,6 +58,15 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher deletedMatcher) { + this.deletedMatcher = deletedMatcher; + return this; + } + + public BulkIndexByScrollResponseMatcher deleted(long deleted) { + return deleted(equalTo(deleted)); + } + /** * Set the matches for the number of batches. 
Defaults to matching any * integer because we usually don't care about how many batches the job @@ -110,6 +121,7 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("fields1", 1)); + } + indexRandom(true, true, true, builders); + + assertThat(deleteByQuery().source("t*").refresh(true).get(), matcher().deleted(docs)); + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); + } + + public void testDeleteByQueryWithMultipleIndices() throws Exception { + final int indices = randomIntBetween(2, 5); + final int docs = randomIntBetween(2, 10) * 2; + long[] candidates = new long[indices]; + + // total number of expected deletions + long deletions = 0; + + List builders = new ArrayList<>(); + for (int i = 0; i < indices; i++) { + // number of documents to be deleted with the upcoming delete-by-query + // (this number differs for each index) + candidates[i] = randomIntBetween(1, docs); + deletions = deletions + candidates[i]; + + for (int j = 0; j < docs; j++) { + boolean candidate = (j < candidates[i]); + builders.add(client().prepareIndex("test-" + i, "doc", String.valueOf(j)).setSource("candidate", candidate)); + } + } + indexRandom(true, true, true, builders); + + // Deletes all the documents with candidate=true + assertThat(deleteByQuery().source("test-*").filter(termQuery("candidate", true)).refresh(true).get(), + matcher().deleted(deletions)); + + for (int i = 0; i < indices; i++) { + long remaining = docs - candidates[i]; + assertHitCount(client().prepareSearch("test-" + i).setSize(0).get(), remaining); + } + + assertHitCount(client().prepareSearch().setSize(0).get(), (indices * docs) - deletions); + } + + public void testDeleteByQueryWithMissingIndex() throws Exception { + indexRandom(true, client().prepareIndex("test", "test", "1").setSource("foo", "a")); + assertHitCount(client().prepareSearch().setSize(0).get(), 1); + + try { + deleteByQuery().source("missing").get(); + fail("should have thrown an exception because of a missing index"); + } catch (IndexNotFoundException e) { + // Ok + } + } + + public void testDeleteByQueryWithRouting() throws Exception { + assertAcked(prepareCreate("test").setSettings("number_of_shards", 2)); + ensureGreen("test"); + + final int docs = randomIntBetween(2, 10); + logger.info("--> indexing [{}] documents with routing", docs); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "test", String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1)); + } + indexRandom(true, true, true, builders); + + logger.info("--> counting documents with no routing, should be equal to [{}]", docs); + assertHitCount(client().prepareSearch().setSize(0).get(), docs); + + String routing = String.valueOf(randomIntBetween(2, docs)); + + logger.info("--> counting documents with routing [{}]", routing); + long expected = client().prepareSearch().setSize(0).setRouting(routing).get().getHits().totalHits(); + + logger.info("--> delete all documents with routing [{}] with a delete-by-query", routing); + DeleteByQueryRequestBuilder delete = deleteByQuery().source("test"); + delete.source().setRouting(routing); + assertThat(delete.refresh(true).get(), matcher().deleted(expected)); + + assertHitCount(client().prepareSearch().setSize(0).get(), docs - expected); + } + + public void testDeleteByMatchQuery() throws Exception { + 
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); + + final int docs = scaledRandomIntBetween(10, 100); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "test", Integer.toString(i)) + .setRouting(randomAsciiOfLengthBetween(1, 5)) + .setSource("foo", "bar")); + } + indexRandom(true, true, true, builders); + + int n = between(0, docs - 1); + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchQuery("_id", Integer.toString(n))).get(), 1); + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get(), docs); + + DeleteByQueryRequestBuilder delete = deleteByQuery().source("alias").filter(matchQuery("_id", Integer.toString(n))); + assertThat(delete.refresh(true).get(), matcher().deleted(1L)); + + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get(), docs - 1); + } + + public void testDeleteByQueryWithDateMath() throws Exception { + indexRandom(true, client().prepareIndex("test", "type", "1").setSource("d", "2013-01-01")); + + DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(rangeQuery("d").to("now-1h")); + assertThat(delete.refresh(true).get(), matcher().deleted(1L)); + + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); + } + + public void testDeleteByQueryOnReadOnlyIndex() throws Exception { + createIndex("test"); + + final int docs = randomIntBetween(1, 50); + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "test", Integer.toString(i)).setSource("field", 1)); + } + indexRandom(true, true, true, builders); + + try { + enableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); + assertThat(deleteByQuery().source("test").refresh(true).get(), matcher().deleted(0).failures(docs)); + } finally { + disableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); + } + + assertHitCount(client().prepareSearch("test").setSize(0).get(), docs); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java new file mode 100644 index 00000000000..6007b646429 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java @@ -0,0 +1,184 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexingOperationListener; +import org.elasticsearch.plugins.Plugin; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +/** + * Tests that you can actually cancel a delete-by-query request and all the plumbing works. Doesn't test all of the different cancellation + * places - that is the responsibility of {@link AsyncBulkByScrollActionTests} which have more precise control to simulate failures but do + * not exercise important portion of the stack like transport and task management. + */ +public class DeleteByQueryCancelTests extends ReindexTestCase { + + private static final String INDEX = "test-delete-by-query"; + private static final String TYPE = "test"; + + private static final int MAX_DELETIONS = 10; + private static final CyclicBarrier barrier = new CyclicBarrier(2); + + @Override + protected int numberOfShards() { + // Only 1 shard and no replica so that test execution + // can be easily controlled within a {@link IndexingOperationListener#preDelete} + return 1; + } + + @Override + protected int numberOfReplicas() { + // Only 1 shard and no replica so that test execution + // can be easily controlled within a {@link IndexingOperationListener#preDelete} + return 0; + } + + @Override + protected Collection> nodePlugins() { + Collection> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(DeleteByQueryCancellationPlugin.class); + return plugins; + } + + public void testCancel() throws Exception { + createIndex(INDEX); + + int totalNumShards = getNumShards(INDEX).totalNumShards; + + // Number of documents to be deleted in this test + final int nbDocsToDelete = totalNumShards * MAX_DELETIONS; + + // Total number of documents that will be created in this test + final int nbDocs = nbDocsToDelete * randomIntBetween(1, 5); + for (int i = 0; i < nbDocs; i++) { + indexRandom(false, client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i)); + } + + refresh(INDEX); + assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), nbDocs); + + // Executes the delete by query; each shard will block after MAX_DELETIONS + DeleteByQueryRequestBuilder deleteByQuery = deleteByQuery().source("_all"); + deleteByQuery.source().setSize(1); + + ListenableActionFuture future = deleteByQuery.execute(); + + // Waits for the indexing operation listener to block + barrier.await(30, TimeUnit.SECONDS); + + // Status should show running + ListTasksResponse tasksList = client().admin().cluster().prepareListTasks() + 
.setActions(DeleteByQueryAction.NAME).setDetailed(true).get(); + assertThat(tasksList.getNodeFailures(), empty()); + assertThat(tasksList.getTaskFailures(), empty()); + assertThat(tasksList.getTasks(), hasSize(1)); + BulkByScrollTask.Status status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus(); + assertNull(status.getReasonCancelled()); + + // Cancel the request while the deletions are blocked. This will prevent further deletions requests from being sent. + List cancelledTasks = client().admin().cluster().prepareCancelTasks() + .setActions(DeleteByQueryAction.NAME).get().getTasks(); + assertThat(cancelledTasks, hasSize(1)); + + // The status should now show canceled. The request will still be in the list because the script is still blocked. + tasksList = client().admin().cluster().prepareListTasks().setActions(DeleteByQueryAction.NAME).setDetailed(true).get(); + assertThat(tasksList.getNodeFailures(), empty()); + assertThat(tasksList.getTaskFailures(), empty()); + assertThat(tasksList.getTasks(), hasSize(1)); + status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus(); + assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); + + // Now unblock the listener so that it can proceed + barrier.await(); + + // And check the status of the response + BulkIndexByScrollResponse response = future.get(); + assertThat(response, matcher() + .deleted(lessThanOrEqualTo((long) MAX_DELETIONS)).batches(MAX_DELETIONS).reasonCancelled(equalTo("by user request"))); + } + + + public static class DeleteByQueryCancellationPlugin extends Plugin { + + @Override + public String name() { + return "delete-by-query-cancellation"; + } + + @Override + public String description() { + return "See " + DeleteByQueryCancellationPlugin.class.getName(); + } + + @Override + public void onIndexModule(IndexModule indexModule) { + indexModule.addIndexOperationListener(new BlockingDeleteListener()); + } + } + + /** + * A {@link IndexingOperationListener} that allows a given number of documents to be deleted + * and then blocks until it is notified to proceed. + */ + public static class BlockingDeleteListener implements IndexingOperationListener { + + private final CountDown blockAfter = new CountDown(MAX_DELETIONS); + + @Override + public Engine.Delete preDelete(Engine.Delete delete) { + if (blockAfter.isCountedDown() || (TYPE.equals(delete.type()) == false)) { + return delete; + } + + if (blockAfter.countDown()) { + try { + // Tell the test we've deleted enough documents. + barrier.await(30, TimeUnit.SECONDS); + + // Wait for the test to tell us to proceed. + barrier.await(30, TimeUnit.SECONDS); + } catch (InterruptedException | BrokenBarrierException | TimeoutException e) { + throw new RuntimeException(e); + } + } + return delete; + } + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java new file mode 100644 index 00000000000..40a776626a3 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryConcurrentTests.java @@ -0,0 +1,119 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicLong; + +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.equalTo; + +public class DeleteByQueryConcurrentTests extends ReindexTestCase { + + public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { + final Thread[] threads = new Thread[scaledRandomIntBetween(2, 5)]; + final long docs = randomIntBetween(1, 50); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + for (int t = 0; t < threads.length; t++) { + builders.add(client().prepareIndex("test", "doc").setSource("field", t)); + } + } + indexRandom(true, true, true, builders); + + final CountDownLatch start = new CountDownLatch(1); + for (int t = 0; t < threads.length; t++) { + final int threadNum = t; + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)).get(), docs); + + Runnable r = () -> { + try { + start.await(); + + assertThat(deleteByQuery().source("_all").filter(termQuery("field", threadNum)).refresh(true).get(), + matcher().deleted(docs)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + }; + threads[t] = new Thread(r); + threads[t].start(); + } + + start.countDown(); + for (Thread thread : threads) { + thread.join(); + } + + for (int t = 0; t < threads.length; t++) { + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", t)).get(), 0); + } + } + + public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { + final long docs = randomIntBetween(50, 100); + + List builders = new ArrayList<>(); + for (int i = 0; i < docs; i++) { + builders.add(client().prepareIndex("test", "doc", String.valueOf(i)).setSource("foo", "bar")); + } + indexRandom(true, true, true, builders); + + final Thread[] threads = new Thread[scaledRandomIntBetween(2, 9)]; + + final CountDownLatch start = new CountDownLatch(1); + final MatchQueryBuilder query = matchQuery("foo", "bar"); + final AtomicLong deleted = new AtomicLong(0); + + for (int t = 0; t < threads.length; t++) { + Runnable r = () -> { + try { + start.await(); + + BulkIndexByScrollResponse response = deleteByQuery().source("test").filter(query).refresh(true).get(); + // Some deletions might fail due to version conflict, but + // what matters here is the total of successful deletions + deleted.addAndGet(response.getDeleted()); + } catch 
(InterruptedException e) { + Thread.currentThread().interrupt(); + } + }; + threads[t] = new Thread(r); + threads[t].start(); + } + + start.countDown(); + for (Thread thread : threads) { + thread.join(); + } + + assertHitCount(client().prepareSearch("test").setSize(0).get(), 0L); + assertThat(deleted.get(), equalTo(docs)); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java index e82dd12a407..670fcefbf55 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingCopiedByDefault() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("foo", index.routing()); } @@ -37,7 +37,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("keep"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("foo", index.routing()); } @@ -45,7 +45,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("discard"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals(null, index.routing()); } @@ -53,7 +53,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("=cat"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("cat", index.routing()); } @@ -61,13 +61,13 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("==]"); IndexRequest index = new IndexRequest(); - action.copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("=]", index.routing()); } @Override protected TransportReindexAction.AsyncIndexBySearchAction action() { - return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, null, null, threadPool, request(), listener()); + return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request(), listener(), null, null); } @Override diff --git 
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index b805dbd2772..74b7548cd63 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -20,7 +20,10 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -31,6 +34,7 @@ import static org.hamcrest.Matchers.containsString; * Tests index-by-search with a script modifying the documents. */ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScriptTestCase { + public void testSetIndex() throws Exception { Object dest = randomFrom(new Object[] {234, 234L, "pancake"}); IndexRequest index = applyScript((Map ctx) -> ctx.put("_index", dest)); @@ -129,11 +133,12 @@ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScri @Override protected ReindexRequest request() { - return new ReindexRequest(); + return new ReindexRequest(new SearchRequest(), new IndexRequest()); } @Override - protected AbstractAsyncBulkIndexByScrollAction action() { - return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, null, null, threadPool, request(), listener()); + protected AbstractAsyncBulkIndexByScrollAction action(ScriptService scriptService, ReindexRequest request) { + return new TransportReindexAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, listener(), scriptService, + null); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java index 33c72baa7cb..f4a777a1973 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java @@ -43,6 +43,10 @@ public abstract class ReindexTestCase extends ESIntegTestCase { return UpdateByQueryAction.INSTANCE.newRequestBuilder(client()); } + protected DeleteByQueryRequestBuilder deleteByQuery() { + return DeleteByQueryAction.INSTANCE.newRequestBuilder(client()); + } + protected RethrottleRequestBuilder rethrottle() { return RethrottleAction.INSTANCE.newRequestBuilder(client()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java index 5c5e45993b9..bb6a33b593a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java @@ -27,14 +27,13 @@ public class UpdateByQueryMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingIsCopied() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(index, doc(RoutingFieldMapper.NAME, "foo")); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); assertEquals("foo", index.routing()); } @Override protected 
TransportUpdateByQueryAction.AsyncIndexBySearchAction action() { - return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, null, threadPool, null, request(), - listener()); + return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request(), listener(), null, null); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 62f405cb0cd..1c57c202766 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.script.ScriptService; + import java.util.Date; import java.util.Map; @@ -26,6 +28,7 @@ import static org.hamcrest.Matchers.containsString; public class UpdateByQueryWithScriptTests extends AbstractAsyncBulkIndexByScrollActionScriptTestCase { + public void testModifyingCtxNotAllowed() { /* * Its important that none of these actually match any of the fields. @@ -49,7 +52,8 @@ public class UpdateByQueryWithScriptTests } @Override - protected AbstractAsyncBulkIndexByScrollAction action() { - return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, null, threadPool, null, request(), listener()); + protected AbstractAsyncBulkIndexByScrollAction action(ScriptService scriptService, UpdateByQueryRequest request) { + return new TransportUpdateByQueryAction.AsyncIndexBySearchAction(task, logger, null, threadPool, request, listener(), + scriptService, null); } } diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml new file mode 100644 index 00000000000..bdad5f581bc --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml @@ -0,0 +1,304 @@ +--- +"Basic response": + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + body: + query: + match_all: {} + + - is_false: timed_out + - match: {deleted: 1} + - is_false: created + - is_false: updated + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {noops: 0} + - match: {throttled_millis: 0} + - gte: { took: 0 } + - is_false: task + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 0} + +--- +"wait_for_completion=false": + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + wait_for_completion: false + index: test + body: + query: + match_all: {} + + - match: {task: '/.+:\d+/'} + - set: {task: task} + - is_false: version_conflicts + - is_false: batches + - is_false: failures + - is_false: noops + - is_false: took + - is_false: throttled_millis + - is_false: created + - is_false: updated + - is_false: deleted + + - do: + tasks.list: + wait_for_completion: true + task_id: $task + - is_false: node_failures + +--- +"Response for version conflict": + - do: + indices.create: + index: test + body: + settings: + index.refresh_interval: -1 + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + 
# Creates a new version for reindex to miss on scan. + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test2" } + + - do: + catch: conflict + delete_by_query: + index: test + body: + query: + match_all: {} + + - match: {deleted: 0} + - match: {version_conflicts: 1} + - match: {batches: 1} + - match: {failures.0.index: test} + - match: {failures.0.type: foo} + - match: {failures.0.id: "1"} + - match: {failures.0.status: 409} + - match: {failures.0.cause.type: version_conflict_engine_exception} + # Use a regex so we don't mind if the current version isn't always 1. Sometimes it comes out 2. + - match: {failures.0.cause.reason: "/\\[foo\\]\\[1\\]:.version.conflict,.current.version.\\[\\d+\\].is.different.than.the.one.provided.\\[\\d+\\]/"} + - match: {failures.0.cause.shard: /\d+/} + - match: {failures.0.cause.index: test} + - gte: { took: 0 } + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 1} + +--- +"Response for version conflict with conflicts=proceed": + - do: + indices.create: + index: test + body: + settings: + index.refresh_interval: -1 + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test" } + - do: + indices.refresh: {} + # Creates a new version for reindex to miss on scan. + - do: + index: + index: test + type: foo + id: 1 + body: { "text": "test2" } + + - do: + delete_by_query: + index: test + conflicts: proceed + body: + query: + match_all: {} + + - match: {deleted: 0} + - match: {version_conflicts: 1} + - match: {batches: 1} + - match: {noops: 0} + - match: {failures: []} + - match: {throttled_millis: 0} + - gte: { took: 0 } + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 1} + +--- +"Limit by query": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "user": "junk" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: twitter + refresh: true + body: + query: + match: + user: kimchy + - match: {deleted: 1} + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - gte: { took: 0 } + + - do: + count: + index: twitter + + - match: {count: 1} + +--- +"Limit by size": + - do: + index: + index: twitter + type: tweet + id: 1 + body: { "user": "kimchy" } + - do: + index: + index: twitter + type: tweet + id: 2 + body: { "user": "kimchy" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: twitter + size: 1 + body: + query: + match_all: {} + + - match: {deleted: 1} + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {throttled_millis: 0} + - gte: { took: 0 } + + - do: + indices.refresh: {} + + - do: + count: + index: twitter + + - match: {count: 1} + +--- +"Can override scroll_size": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + refresh: true + scroll_size: 1 + body: + query: + match_all: {} + + - match: {batches: 3} + - match: {deleted: 3} + + - do: + count: + index: test + + - match: {count: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml 
b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml new file mode 100644 index 00000000000..8648c9034ee --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml @@ -0,0 +1,99 @@ +--- +"no body fails": + - do: + catch: param + delete_by_query: + index: _all + +--- +"invalid conflicts fails": + - do: + index: + index: test + type: test + id: 1 + body: { "text": "test" } + - do: + catch: /conflicts may only be .* but was \[cat\]/ + delete_by_query: + index: test + conflicts: cat + body: + query: + match_all: {} + +--- +"invalid size fails": + - do: + index: + index: test + type: test + id: 1 + body: { "text": "test" } + - do: + catch: /size should be greater than 0 if the request is limited to some number of documents or -1 if it isn't but it was \[-4\]/ + delete_by_query: + index: test + size: -4 + body: + query: + match_all: {} + +--- +"invalid scroll_size fails": + - do: + index: + index: test + type: test + id: 1 + body: { "text": "test" } + - do: + catch: /Failed to parse int parameter \[scroll_size\] with value \[asdf\]/ + delete_by_query: + index: test + scroll_size: asdf + body: + query: + match_all: {} + +--- +"source fields may not be modified": + - do: + catch: /fields is not supported in this context/ + delete_by_query: + index: test + body: + fields: [_id] + +--- +"requests_per_second cannot be an empty string": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + delete_by_query: + requests_per_second: "" + index: test + body: + query: + match_all: {} + +--- +"requests_per_second cannot be negative": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + delete_by_query: + requests_per_second: -12 + index: test + body: + query: + match_all: {} + +--- +"requests_per_second cannot be zero": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. 
Use "unlimited" to disable throttling./ + delete_by_query: + requests_per_second: 0 + index: test + body: + query: + match_all: {} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml new file mode 100644 index 00000000000..1ee249fc6bb --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/30_by_type.yaml @@ -0,0 +1,72 @@ +--- +"Delete by type": + - do: + index: + index: test + type: t1 + id: 1 + body: { foo: bar } + - do: + index: + index: test + type: t1 + id: 2 + body: { foo: bar } + - do: + index: + index: test + type: t2 + id: 1 + body: { foo: bar } + - do: + index: + index: test + type: t2 + id: 2 + body: { foo: bar } + - do: + index: + index: test + type: t2 + id: 3 + body: { foo: baz } + - do: + indices.refresh: {} + - do: + count: + index: test + type: t2 + + - match: {count: 3} + + - do: + delete_by_query: + index: test + type: t2 + body: + query: + match: + foo: bar + + - is_false: timed_out + - match: {deleted: 2} + - is_false: created + - is_false: updated + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {noops: 0} + - match: {throttled_millis: 0} + - gte: { took: 0 } + - is_false: task + + - do: + indices.refresh: {} + + - do: + count: + index: test + type: t2 + + - match: {count: 1} + diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml new file mode 100644 index 00000000000..a69dfdfac04 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml @@ -0,0 +1,62 @@ +--- +"can override consistency": + - do: + indices.create: + index: test + body: + settings: + number_of_replicas: 5 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: test + id: 1 + body: {"text": "test"} + consistency: one + - do: + indices.refresh: {} + + - do: + catch: unavailable + delete_by_query: + index: test + timeout: 1s + body: + query: + match_all: {} + + - match: + failures.0.cause.reason: /Not.enough.active.copies.to.meet.write.consistency.of.\[QUORUM\].\(have.1,.needed.4\)..Timeout\:.\[1s\],.request:.\[BulkShardRequest.to.\[test\].containing.\[1\].requests\]/ + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 1} + + - do: + delete_by_query: + index: test + consistency: one + body: + query: + match_all: {} + + - match: {failures: []} + - match: {deleted: 1} + - match: {version_conflicts: 0} + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml new file mode 100644 index 00000000000..0ff382ff751 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml @@ -0,0 +1,202 @@ +"Throttle the request": + # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard + # and a small batch size on the request + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + 
index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + scroll_size: 1 + requests_per_second: 1 + body: + query: + match_all: {} + + - match: {batches: 3} + - match: {deleted: 3} + - gt: {throttled_millis: 1000} + - lt: {throttled_millis: 4000} + +--- +"requests_per_second supports unlimited which turns off throttling": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + index: test + scroll_size: 1 + requests_per_second: unlimited + body: + query: + match_all: {} + + - match: {batches: 3} + - match: {deleted: 3} + - match: {throttled_millis: 0} + +--- +"Rethrottle": + # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard + # and a small batch size on the request + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + requests_per_second: .00000001 # About 9.5 years to complete the request + wait_for_completion: false + index: test + scroll_size: 1 + body: + query: + match_all: {} + + - match: {task: '/.+:\d+/'} + - set: {task: task} + + - do: + reindex.rethrottle: + requests_per_second: unlimited + task_id: $task + + - do: + tasks.list: + wait_for_completion: true + task_id: $task + + - do: + indices.refresh: {} + + - do: + count: + index: test + + - match: {count: 0} + +--- +"Rethrottle but not unlimited": + # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard + # and a small batch size on the request + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + - do: + cluster.health: + wait_for_status: yellow + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + index: + index: test + type: foo + body: { "text": "test" } + - do: + indices.refresh: {} + + - do: + delete_by_query: + requests_per_second: .00000001 # About 9.5 years to complete the request + wait_for_completion: false + index: test + scroll_size: 1 + body: + query: + match_all: {} + + - match: {task: '/.+:\d+/'} + - set: {task: task} + + - do: + reindex.rethrottle: + requests_per_second: 1 + task_id: $task + + - do: + tasks.list: + wait_for_completion: true + task_id: $task diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index 63d0edefe14..f4025383321 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -24,6 +24,7 @@ - match: {throttled_millis: 0} - gte: { took: 0 } - is_false: task + - is_false: deleted --- 
"Response format for updated": @@ -57,6 +58,7 @@ - match: {throttled_millis: 0} - gte: { took: 0 } - is_false: task + - is_false: deleted --- "wait_for_completion=false": @@ -88,6 +90,7 @@ - is_false: took - is_false: throttled_millis - is_false: created + - is_false: deleted - do: tasks.list: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml index dc54c0d91cf..a7a5198e430 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml @@ -59,7 +59,7 @@ --- "search size fails if not a number": - do: - catch: '/NumberFormatException: For input string: "cat"/' + catch: '/number_format_exception.*For input string: \"cat\"/' reindex: body: source: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml index 89266198f2c..843bb9b6eb5 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml @@ -23,6 +23,7 @@ # Update by query can't create - is_false: created - is_false: task + - is_false: deleted --- "wait_for_completion=false": @@ -49,6 +50,7 @@ - is_false: took - is_false: throttled_millis - is_false: created + - is_false: deleted - do: tasks.list: diff --git a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml index 00c6e814eed..54a79ac1e32 100644 --- a/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml +++ b/qa/smoke-test-reindex-with-groovy/src/test/resources/rest-api-spec/test/update_by_query/10_script.yaml @@ -102,7 +102,7 @@ - match: {batches: 1} --- -"Setting bogus ctx is an error": +"Setting bogus context is an error": - do: index: index: twitter @@ -113,7 +113,7 @@ indices.refresh: {} - do: - catch: /Invalid fields added to ctx \[junk\]/ + catch: /Invalid fields added to context \[junk\]/ update_by_query: index: twitter body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json new file mode 100644 index 00000000000..981aea79a1c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -0,0 +1,207 @@ +{ + "delete_by_query": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "methods": ["POST"], + "url": { + "path": "/{index}/_delete_by_query", + "paths": ["/{index}/_delete_by_query", "/{index}/{type}/_delete_by_query"], + "comment": "most things below this are just copied from search.json", + "parts": { + "index": { + "required" : true, + "type" : "list", + "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" + }, + "type": { + "type" : "list", + "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" + } + }, + "params": { + "analyzer": { + "type" : "string", + "description" : "The analyzer to use for the query string" + }, + "analyze_wildcard": { + 
"type" : "boolean", + "description" : "Specify whether wildcard and prefix queries should be analyzed (default: false)" + }, + "default_operator": { + "type" : "enum", + "options" : ["AND","OR"], + "default" : "OR", + "description" : "The default operator for query string query (AND or OR)" + }, + "df": { + "type" : "string", + "description" : "The field to use as default where no field prefix is given in the query string" + }, + "explain": { + "type" : "boolean", + "description" : "Specify whether to return detailed information about score computation as part of a hit" + }, + "fields": { + "type" : "list", + "description" : "A comma-separated list of fields to return as part of a hit" + }, + "fielddata_fields": { + "type" : "list", + "description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" + }, + "from": { + "type" : "number", + "description" : "Starting offset (default: 0)" + }, + "ignore_unavailable": { + "type" : "boolean", + "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "allow_no_indices": { + "type" : "boolean", + "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "conflicts": { + "note": "This is not copied from search", + "type" : "enum", + "options": ["abort", "proceed"], + "default": "abort", + "description" : "What to do when the delete-by-query hits version conflicts?" + }, + "expand_wildcards": { + "type" : "enum", + "options" : ["open","closed","none","all"], + "default" : "open", + "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." + }, + "lenient": { + "type" : "boolean", + "description" : "Specify whether format-based query failures (such as providing text to a numeric field) should be ignored" + }, + "lowercase_expanded_terms": { + "type" : "boolean", + "description" : "Specify whether query terms should be lowercased" + }, + "preference": { + "type" : "string", + "description" : "Specify the node or shard the operation should be performed on (default: random)" + }, + "q": { + "type" : "string", + "description" : "Query in the Lucene query string syntax" + }, + "routing": { + "type" : "list", + "description" : "A comma-separated list of specific routing values" + }, + "scroll": { + "type" : "duration", + "description" : "Specify how long a consistent view of the index should be maintained for scrolled search" + }, + "search_type": { + "type" : "enum", + "options" : ["query_then_fetch", "dfs_query_then_fetch"], + "description" : "Search operation type" + }, + "search_timeout": { + "type" : "time", + "description" : "Explicit timeout for each search request. Defaults to no timeout." 
+ }, + "size": { + "type" : "number", + "description" : "Number of hits to return (default: 10)" + }, + "sort": { + "type" : "list", + "description" : "A comma-separated list of : pairs" + }, + "_source": { + "type" : "list", + "description" : "True or false to return the _source field or not, or a list of fields to return" + }, + "_source_exclude": { + "type" : "list", + "description" : "A list of fields to exclude from the returned _source field" + }, + "_source_include": { + "type" : "list", + "description" : "A list of fields to extract and return from the _source field" + }, + "terminate_after": { + "type" : "number", + "description" : "The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early." + }, + "stats": { + "type" : "list", + "description" : "Specific 'tag' of the request for logging and statistical purposes" + }, + "suggest_field": { + "type" : "string", + "description" : "Specify which field to use for suggestions" + }, + "suggest_mode": { + "type" : "enum", + "options" : ["missing", "popular", "always"], + "default" : "missing", + "description" : "Specify suggest mode" + }, + "suggest_size": { + "type" : "number", + "description" : "How many suggestions to return in response" + }, + "suggest_text": { + "type" : "text", + "description" : "The source text for which the suggestions should be returned" + }, + "timeout": { + "type" : "time", + "description" : "Explicit operation timeout" + }, + "track_scores": { + "type" : "boolean", + "description": "Whether to calculate and return scores even if they are not used for sorting" + }, + "version": { + "type" : "boolean", + "description" : "Specify whether to return document version as part of a hit" + }, + "request_cache": { + "type" : "boolean", + "description" : "Specify if request cache should be used for this request or not, defaults to index level setting" + }, + "refresh": { + "type" : "boolean", + "description" : "Should the effected indexes be refreshed?" + }, + "timeout": { + "type" : "time", + "default": "1m", + "description" : "Time each individual bulk request should wait for shards that are unavailable." + }, + "consistency": { + "type" : "enum", + "options" : ["one", "quorum", "all"], + "description" : "Explicit write consistency setting for the operation" + }, + "scroll_size": { + "type": "integer", + "defaut_value": 100, + "description": "Size on the scroll request powering the update_by_query" + }, + "wait_for_completion": { + "type" : "boolean", + "default": false, + "description" : "Should the request should block until the delete-by-query is complete." + }, + "requests_per_second": { + "type": "float", + "default": 0, + "description": "The throttle for this request in sub-requests per second. 0 means set no throttle." 
+ } + } + }, + "body": { + "description": "The search definition using the Query DSL", + "required": true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json index 4903c7598c3..921249ab4cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json @@ -4,7 +4,7 @@ "methods": ["POST"], "url": { "path": "/_reindex/{task_id}/_rethrottle", - "paths": ["/_reindex/{task_id}/_rethrottle", "/_update_by_query/{task_id}/_rethrottle"], + "paths": ["/_reindex/{task_id}/_rethrottle", "/_update_by_query/{task_id}/_rethrottle", "/_delete_by_query/{task_id}/_rethrottle"], "parts": { "task_id": { "type": "string", From 757ccf00b296228e4b2e63485f7aa0d37a131d7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 May 2016 14:53:55 +0200 Subject: [PATCH 29/36] Enforce MatchQueryBuilder#maxExpansions() to be strictly positive --- .../index/query/MatchQueryBuilder.java | 6 +- .../index/query/MatchQueryBuilderTests.java | 67 +++++++------------ 2 files changed, 27 insertions(+), 46 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 58207f7ca54..6c867590653 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -267,7 +267,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { */ public MatchQueryBuilder prefixLength(int prefixLength) { if (prefixLength < 0 ) { - throw new IllegalArgumentException("No negative prefix length allowed."); + throw new IllegalArgumentException("[" + NAME + "] requires prefix length to be non-negative."); } this.prefixLength = prefixLength; return this; @@ -284,8 +284,8 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { * When using fuzzy or prefix type query, the number of term expansions to use. 
*/ public MatchQueryBuilder maxExpansions(int maxExpansions) { - if (maxExpansions < 0 ) { - throw new IllegalArgumentException("No negative maxExpansions allowed."); + if (maxExpansions <= 0 ) { + throw new IllegalArgumentException("[" + NAME + "] requires maxExpansions to be positive."); } this.maxExpansions = maxExpansions; return this; diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 87f8f3981d3..c5e01c1d0ff 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PointRangeQuery; @@ -202,64 +201,46 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase new MatchQueryBuilder(null, "value")); + assertEquals("[match] requires fieldName", e.getMessage()); } - try { - new MatchQueryBuilder("fieldName", null); - fail("value must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder("fieldName", null)); + assertEquals("[match] requires query value", e.getMessage()); } MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); - try { - matchQuery.prefixLength(-1); - fail("must not be positive"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.prefixLength(-1)); + assertEquals("[match] requires prefix length to be non-negative.", e.getMessage()); } - try { - matchQuery.maxExpansions(-1); - fail("must not be positive"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> matchQuery.maxExpansions(randomIntBetween(-10, 0))); + assertEquals("[match] requires maxExpansions to be positive.", e.getMessage()); } - try { - matchQuery.operator(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.operator(null)); + assertEquals("[match] requires operator to be non-null", e.getMessage()); } - try { - matchQuery.type(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.type(null)); + assertEquals("[match] requires type to be non-null", e.getMessage()); } - try { - matchQuery.zeroTermsQuery(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.zeroTermsQuery(null)); + assertEquals("[match] requires zeroTermsQuery to be non-null", e.getMessage()); } - } - public void testBadAnalyzer() throws IOException { - MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); matchQuery.analyzer("bogusAnalyzer"); - 
try { - matchQuery.toQuery(createShardContext()); - fail("Expected QueryShardException"); - } catch (QueryShardException e) { + { + QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext())); assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found")); } } From 4d6887075f2a57944001032630d4f55fd9616d9f Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Thu, 19 May 2016 17:12:37 +0200 Subject: [PATCH 30/36] Log IndexShard.refresh logs under trace (#18435) We log them every second... --- .../org/elasticsearch/index/shard/IndexShard.java | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 768db935308..2cadc1ad24b 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -576,16 +576,22 @@ public class IndexShard extends AbstractIndexShardComponent { long bytes = getEngine().getIndexBufferRAMBytesUsed(); writingBytes.addAndGet(bytes); try { - logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); + if (logger.isTraceEnabled()) { + logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); + } long time = System.nanoTime(); getEngine().refresh(source); refreshMetric.inc(System.nanoTime() - time); } finally { - logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); + if (logger.isTraceEnabled()) { + logger.trace("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); + } writingBytes.addAndGet(-bytes); } } else { - logger.debug("refresh with source [{}]", source); + if (logger.isTraceEnabled()) { + logger.trace("refresh with source [{}]", source); + } long time = System.nanoTime(); getEngine().refresh(source); refreshMetric.inc(System.nanoTime() - time); From d2515727d09b16a97357f7963538ce40928112cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 May 2016 16:09:57 +0200 Subject: [PATCH 31/36] Improve random DateTimeZone creation in tests We often require a random joda DateTimeZone in our tests. Currently there are a few options for generating such a random DateTimeZone from the set of available ids. Currently most random picks are not really reproducable across different jvms because they rely on order in the ids set implementation. The helper in DateProcessorFactoryTests thus performs a sort on the set of ids before random picking from the result, so I moved this to ESTestCase to make it publicly available and changed all other tests to use that method. 
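
For reference, the shared helper ends up with the following shape (a condensed sketch of the ESTestCase hunk below; randomFrom() is the existing seeded helper from the test infrastructure):

    // Sort the available time zone ids before picking one, so that the same test seed
    // selects the same DateTimeZone on every JVM, independent of Set iteration order.
    public static DateTimeZone randomDateTimeZone() {
        List<String> ids = new ArrayList<>(DateTimeZone.getAvailableIDs());
        Collections.sort(ids);
        return DateTimeZone.forID(randomFrom(ids));
    }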
--- .../rounding/TimeZoneRoundingTests.java | 10 ++++------ .../index/query/AbstractQueryTestCase.java | 6 ------ .../query/QueryStringQueryBuilderTests.java | 4 +--- .../index/query/RangeQueryBuilderTests.java | 2 +- .../processor/DateProcessorFactoryTests.java | 10 +--------- .../aggregations/bucket/DateRangeIT.java | 3 +-- .../aggregations/bucket/DateRangeTests.java | 5 +---- .../elasticsearch/test/ESIntegTestCase.java | 19 ------------------- .../org/elasticsearch/test/ESTestCase.java | 10 ++++++++++ 9 files changed, 19 insertions(+), 50 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java index 08a4ba11342..61710b726d4 100644 --- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java @@ -25,7 +25,6 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; -import java.util.ArrayList; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; @@ -234,19 +233,18 @@ public class TimeZoneRoundingTests extends ESTestCase { * amount of milliseconds. */ public void testSubHourNextRoundingEquallySpaced() { - String timeZone = randomFrom(new ArrayList<>(DateTimeZone.getAvailableIDs())); DateTimeUnit unit = randomFrom(new DateTimeUnit[] { DateTimeUnit.HOUR_OF_DAY, DateTimeUnit.MINUTES_OF_HOUR, DateTimeUnit.SECOND_OF_MINUTE }); - DateTimeZone tz = DateTimeZone.forID(timeZone); - TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, tz); + DateTimeZone timezone = randomDateTimeZone(); + TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, timezone); // move the random date to transition for timezones that have offset change due to dst transition - long nextTransition = tz.nextTransition(Math.abs(randomLong() % ((long) 10e11))); + long nextTransition = timezone.nextTransition(Math.abs(randomLong() % ((long) 10e11))); final long millisPerUnit = unit.field().getDurationField().getUnitMillis(); // start ten units before transition long roundedDate = rounding.round(nextTransition - (10 * millisPerUnit)); while (roundedDate < nextTransition + 10 * millisPerUnit) { long delta = rounding.nextRoundingValue(roundedDate) - roundedDate; - assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timeZone + "] at " + assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timezone + "] at " + new DateTime(roundedDate), millisPerUnit, delta); roundedDate = rounding.nextRoundingValue(roundedDate); } diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 9c47d701f57..22bdc2c7290 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -903,12 +903,6 @@ public abstract class AbstractQueryTestCase> return randomFrom("1", "-1", "75%", "-25%", "2<75%", "2<-25%"); } - protected static String randomTimeZone() { - return randomFrom(TIMEZONE_IDS); - } - - private static final List TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs()); - private static class ClientInvocationHandler implements InvocationHandler { AbstractQueryTestCase 
delegate; diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 375db0c1eaf..cf16456b2bf 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.document.IntPoint; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; @@ -27,7 +26,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -148,7 +146,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); - Collections.sort(ids); - return DateTimeZone.forID(randomFrom(ids)); - } - - public void testParseMatchFormats() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index a95b3cd4871..36613cfa784 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -295,8 +295,7 @@ public class DateRangeIT extends ESIntegTestCase { } public void testSingleValueFieldWithDateMath() throws Exception { - String[] ids = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); - DateTimeZone timezone = DateTimeZone.forID(randomFrom(ids)); + DateTimeZone timezone = randomDateTimeZone(); int timeZoneOffset = timezone.getOffset(date(2, 15)); // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format String feb15Suffix = timeZoneOffset == 0 ? 
"Z" : date(2,15, timezone).toString("ZZ"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index eba3790330b..ee32915fdd7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -22,12 +22,9 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder; -import org.joda.time.DateTimeZone; public class DateRangeTests extends BaseAggregationTestCase { - private final static String[] timeZoneIds = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); - @Override protected DateRangeAggregationBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); @@ -60,7 +57,7 @@ public class DateRangeTests extends BaseAggregationTestCase ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); + Collections.sort(ids); + return DateTimeZone.forID(randomFrom(ids)); + } + /** * helper to randomly perform on consumer with value */ From 848648862736843c219de1036114ddf4a911e6cc Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Thu, 19 May 2016 18:44:53 +0200 Subject: [PATCH 32/36] Disable DeleteByQueryRestIT delete_by_query/10_basic/Basic delete_by_query Because of a REST test namespace conflict introduced by 18329. Issue tracked in 18469 --- .../rest-api-spec/test/delete_by_query/10_basic.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml index 063e959a807..124122a44bf 100644 --- a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml +++ b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml @@ -25,6 +25,10 @@ setup: --- "Basic delete_by_query": + - skip: + version: all + reason: "Test muted because of a REST test namespace conflict, see https://github.com/elastic/elasticsearch/issues/18469" + - do: delete_by_query: index: test_1 From dc33a8323188416df4e85d0ab762a750a482611b Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 19 May 2016 19:37:13 +0200 Subject: [PATCH 33/36] Remove the preserve_original option from the FingerprintAnalyzer (#18471) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The preserve_original option to the ASCIIFoldingFilter doesn't play well with the FingerprintFilter, as it ends up producing fingerprints like: "and consistent godel gödel is said sentence this yes" The goal of the OpenRefine algorithm is to product a small normalized ASCII fingerprint. There's no need to expose preserve_original. 
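
To make the change concrete, the analysis chain built by FingerprintAnalyzer#createComponents now always folds to plain ASCII before the fingerprint is computed (a condensed sketch of the hunk below):

    // Lowercase, ASCII-fold without keeping the original token, drop stop words,
    // then collapse the remaining terms into a single sorted, deduplicated fingerprint.
    final Tokenizer tokenizer = new StandardTokenizer();
    TokenStream stream = new LowerCaseFilter(tokenizer);
    stream = new ASCIIFoldingFilter(stream, false);
    stream = new StopFilter(stream, stopWords);
    stream = new FingerprintFilter(stream, maxOutputSize, separator);
    // The docs example sentence now fingerprints to "and consistent godel is said sentence this yes"
    // rather than "and consistent godel gödel is said sentence this yes".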
--- .../index/analysis/FingerprintAnalyzer.java | 6 ++---- .../analysis/FingerprintAnalyzerProvider.java | 5 +---- .../analysis/FingerprintAnalyzerTests.java | 20 ++++--------------- 3 files changed, 7 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java index f7bf44256cc..985a081ccc8 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java @@ -33,13 +33,11 @@ import org.apache.lucene.analysis.util.CharArraySet; public final class FingerprintAnalyzer extends Analyzer { private final char separator; private final int maxOutputSize; - private final boolean preserveOriginal; private final CharArraySet stopWords; - public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize, boolean preserveOriginal) { + public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) { this.separator = separator; this.maxOutputSize = maxOutputSize; - this.preserveOriginal = preserveOriginal; this.stopWords = stopWords; } @@ -48,7 +46,7 @@ public final class FingerprintAnalyzer extends Analyzer { final Tokenizer tokenizer = new StandardTokenizer(); TokenStream stream = tokenizer; stream = new LowerCaseFilter(stream); - stream = new ASCIIFoldingFilter(stream, preserveOriginal); + stream = new ASCIIFoldingFilter(stream, false); stream = new StopFilter(stream, stopWords); stream = new FingerprintFilter(stream, maxOutputSize, separator); return new TokenStreamComponents(tokenizer, stream); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java index 897068cbf8b..bb8a51e0969 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java @@ -34,10 +34,8 @@ import org.elasticsearch.index.IndexSettings; public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider { public static ParseField MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.MAX_OUTPUT_SIZE; - public static ParseField PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.PRESERVE_ORIGINAL; public static int DEFAULT_MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.DEFAULT_MAX_OUTPUT_SIZE; - public static boolean DEFAULT_PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.DEFAULT_PRESERVE_ORIGINAL; public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET; private final FingerprintAnalyzer analyzer; @@ -47,10 +45,9 @@ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider
Date: Thu, 19 May 2016 19:42:23 +0200 Subject: [PATCH 34/36] Docs: Improved tokenizer docs (#18356) * Docs: Improved tokenizer docs Added descriptions and runnable examples * Addressed Nik's comments * Added TESTRESPONSEs for all tokenizer examples * Added TESTRESPONSEs for all analyzer examples too * Added docs, examples, and TESTRESPONSES for character filters * Skipping two tests: One interprets "$1" as a stack variable - same problem exists with the REST tests The other because the "took" value is always different * Fixed tests with "took" * Fixed failing tests and removed preserve_original from fingerprint analyzer --- .../analysis/analyzers/configuring.asciidoc | 35 ++ .../analyzers/custom-analyzer.asciidoc | 88 ++++- .../analyzers/fingerprint-analyzer.asciidoc | 59 ++- .../analyzers/keyword-analyzer.asciidoc | 21 ++ .../analyzers/pattern-analyzer.asciidoc | 203 ++++++++++ .../analyzers/simple-analyzer.asciidoc | 91 +++++ .../analyzers/standard-analyzer.asciidoc | 174 +++++++++ .../analysis/analyzers/stop-analyzer.asciidoc | 147 ++++++++ .../analyzers/whitespace-analyzer.asciidoc | 84 +++++ docs/reference/analysis/charfilters.asciidoc | 34 +- .../charfilters/htmlstrip-charfilter.asciidoc | 134 ++++++- .../charfilters/mapping-charfilter.asciidoc | 210 +++++++++-- .../pattern-replace-charfilter.asciidoc | 266 +++++++++++-- docs/reference/analysis/tokenizers.asciidoc | 132 ++++++- .../tokenizers/classic-tokenizer.asciidoc | 276 +++++++++++++- .../tokenizers/edgengram-tokenizer.asciidoc | 355 +++++++++++++++--- .../tokenizers/keyword-tokenizer.asciidoc | 63 +++- .../tokenizers/letter-tokenizer.asciidoc | 124 +++++- .../tokenizers/lowercase-tokenizer.asciidoc | 137 ++++++- .../tokenizers/ngram-tokenizer.asciidoc | 345 ++++++++++++++--- .../pathhierarchy-tokenizer.asciidoc | 183 ++++++++- .../tokenizers/pattern-tokenizer.asciidoc | 288 ++++++++++++-- .../tokenizers/standard-tokenizer.asciidoc | 280 +++++++++++++- .../tokenizers/thai-tokenizer.asciidoc | 107 +++++- .../tokenizers/uaxurlemail-tokenizer.asciidoc | 205 +++++++++- .../tokenizers/whitespace-tokenizer.asciidoc | 114 +++++- 26 files changed, 3844 insertions(+), 311 deletions(-) diff --git a/docs/reference/analysis/analyzers/configuring.asciidoc b/docs/reference/analysis/analyzers/configuring.asciidoc index c93d800afb9..2ce13702e00 100644 --- a/docs/reference/analysis/analyzers/configuring.asciidoc +++ b/docs/reference/analysis/analyzers/configuring.asciidoc @@ -64,3 +64,38 @@ POST my_index/_analyze English stop words will be removed. 
The resulting terms are: `[ old, brown, cow ]` + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "old", + "start_offset": 4, + "end_offset": 7, + "type": "", + "position": 1 + }, + { + "token": "brown", + "start_offset": 8, + "end_offset": 13, + "type": "", + "position": 2 + }, + { + "token": "cow", + "start_offset": 14, + "end_offset": 17, + "type": "", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index eccd16c23be..1707a9a399b 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -84,6 +84,48 @@ POST my_index/_analyze -------------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "is", + "start_offset": 0, + "end_offset": 2, + "type": "", + "position": 0 + }, + { + "token": "this", + "start_offset": 3, + "end_offset": 7, + "type": "", + "position": 1 + }, + { + "token": "deja", + "start_offset": 11, + "end_offset": 15, + "type": "", + "position": 2 + }, + { + "token": "vu", + "start_offset": 16, + "end_offset": 22, + "type": "", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] @@ -119,13 +161,10 @@ PUT my_index "analyzer": { "my_custom_analyzer": { "type": "custom", - "char_filter": [ "emoticons" <1> ], - "tokenizer": "punctuation", <1> - "filter": [ "lowercase", "english_stop" <1> @@ -165,11 +204,54 @@ POST my_index/_analyze "text": "I'm a :) person, and you?" } -------------------------------------------------- +// CONSOLE <1> The `emoticon` character filter, `punctuation` tokenizer and `english_stop` token filter are custom implementations which are defined in the same index settings. 
+///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "i'm", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "_happy_", + "start_offset": 6, + "end_offset": 8, + "type": "word", + "position": 2 + }, + { + "token": "person", + "start_offset": 9, + "end_offset": 15, + "type": "word", + "position": 3 + }, + { + "token": "you", + "start_offset": 21, + "end_offset": 24, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index b393c883441..24dc92380bb 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -36,6 +36,27 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "and consistent godel is said sentence this yes", + "start_offset": 0, + "end_offset": 52, + "type": "fingerprint", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following single term: [source,text] @@ -58,16 +79,11 @@ The `fingerprint` analyzer accepts the following parameters: The maximum token size to emit. Defaults to `255`. Tokens larger than this size will be discarded. -`preserve_original`:: - - If `true`, emits two tokens: one with ASCII-folding of terms that contain - extended characters (if any) and one with the original characters. - Defaults to `false`. - `stopwords`:: A pre-defined stop words list like `_english_` or an array containing a list of stop words. Defaults to `_none_`. + `stopwords_path`:: The path to a file containing stop words. @@ -80,8 +96,7 @@ about stop word configuration. 
=== Example configuration In this example, we configure the `fingerprint` analyzer to use the -pre-defined list of English stop words, and to emit a second token in -the presence of non-ASCII characters: +pre-defined list of English stop words: [source,js] ---------------------------- @@ -92,8 +107,7 @@ PUT my_index "analyzer": { "my_fingerprint_analyzer": { "type": "fingerprint", - "stopwords": "_english_", - "preserve_original": true + "stopwords": "_english_" } } } @@ -110,9 +124,30 @@ POST my_index/_analyze ---------------------------- // CONSOLE -The above example produces the following two terms: +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "consistent godel said sentence yes", + "start_offset": 0, + "end_offset": 52, + "type": "fingerprint", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: [source,text] --------------------------- -[ consistent godel said sentence yes, consistent gödel said sentence yes ] +[ consistent godel said sentence yes ] --------------------------- diff --git a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc index a0c1b1b0a6a..cc94f3b757e 100644 --- a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc @@ -25,6 +25,27 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone.", + "start_offset": 0, + "end_offset": 56, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following single term: [source,text] diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 6a4ca274416..2d5741c2b9e 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -30,6 +30,104 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 8 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 9 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 10 + }, + { + "token": "bone", + 
"start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -110,6 +208,55 @@ POST my_index/_analyze <1> The backslashes in the pattern need to be escaped when specifying the pattern as a JSON string. +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "john", + "start_offset": 0, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "smith", + "start_offset": 5, + "end_offset": 10, + "type": "word", + "position": 1 + }, + { + "token": "foo", + "start_offset": 11, + "end_offset": 14, + "type": "word", + "position": 2 + }, + { + "token": "bar", + "start_offset": 15, + "end_offset": 18, + "type": "word", + "position": 3 + }, + { + "token": "com", + "start_offset": 19, + "end_offset": 22, + "type": "word", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] @@ -148,6 +295,62 @@ GET my_index/_analyze -------------------------------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "moose", + "start_offset": 0, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "x", + "start_offset": 5, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ftp", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "class", + "start_offset": 11, + "end_offset": 16, + "type": "word", + "position": 3 + }, + { + "token": "2", + "start_offset": 16, + "end_offset": 17, + "type": "word", + "position": 4 + }, + { + "token": "beta", + "start_offset": 18, + "end_offset": 22, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc index 4c932bb5d3e..a57c30d8dd6 100644 --- a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc @@ -25,6 +25,97 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 
+ }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc index 72292e1d40d..3b948892483 100644 --- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc @@ -33,6 +33,97 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -98,6 +189,89 @@ POST my_index/_analyze ---------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumpe", + "start_offset": 24, + "end_offset": 29, + "type": "", + "position": 5 + }, + { + "token": "d", + "start_offset": 29, + "end_offset": 30, + "type": "", + "position": 6 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 9 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 10 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc index ada9022a287..e40436342d7 100644 --- a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc +++ 
b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc @@ -29,6 +29,83 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -87,6 +164,76 @@ POST my_index/_analyze ---------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc index 0dce8db1c99..f95e5c6e4ab 100644 --- a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc @@ -25,6 +25,90 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "Brown-Foxes", + "start_offset": 12, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + 
"token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "word", + "position": 8 + }, + { + "token": "bone.", + "start_offset": 51, + "end_offset": 56, + "type": "word", + "position": 9 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] diff --git a/docs/reference/analysis/charfilters.asciidoc b/docs/reference/analysis/charfilters.asciidoc index c9f5805284c..cd24f5bf571 100644 --- a/docs/reference/analysis/charfilters.asciidoc +++ b/docs/reference/analysis/charfilters.asciidoc @@ -1,16 +1,36 @@ [[analysis-charfilters]] == Character Filters -Character filters are used to preprocess the string of -characters before it is passed to the <>. -A character filter may be used to strip out HTML markup, or to convert -`"&"` characters to the word `"and"`. +_Character filters_ are used to preprocess the stream of characters before it +is passed to the <>. -Elasticsearch has built in characters filters which can be -used to build <>. +A character filter receives the original text as a stream of characters and +can transform the stream by adding, removing, or changing characters. For +instance, a character filter could be used to convert Arabic numerals +(٠‎١٢٣٤٥٦٧٨‎٩‎) into their Latin equivalents (0123456789), or to strip HTML +elements like `` from the stream. -include::charfilters/mapping-charfilter.asciidoc[] + +Elasticsearch has a number of built in character filters which can be used to build +<>. + +<>:: + +The `html_strip` character filter strips out HTML elements like `` and +decodes HTML entities like `&`. + +<>:: + +The `mapping` character filter replaces any occurrences of the specified +strings with the specified replacements. + +<>:: + +The `pattern_replace` character filter replaces any characters matching a +regular expression with the specified replacement. include::charfilters/htmlstrip-charfilter.asciidoc[] +include::charfilters/mapping-charfilter.asciidoc[] + include::charfilters/pattern-replace-charfilter.asciidoc[] diff --git a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc index f12238a36ad..3d8b187d772 100644 --- a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc @@ -1,5 +1,135 @@ [[analysis-htmlstrip-charfilter]] === HTML Strip Char Filter -A char filter of type `html_strip` stripping out HTML elements from an -analyzed text. +The `html_strip` character filter strips HTML elements from the text and +replaces HTML entities with their decoded value (e.g. replacing `&` with +`&`). + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "keyword", <1> + "char_filter": [ "html_strip" ], + "text": "
<p>I&apos;m so <b>happy</b>!</p>
" +} +--------------------------- +// CONSOLE +<1> The <> returns a single term. + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "\nI'm so happy!\n", + "start_offset": 0, + "end_offset": 32, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example returns the term: + +[source,js] +--------------------------- +[ \nI'm so happy!\n ] +--------------------------- + +The same example with the `standard` tokenizer would return the following terms: + +[source,js] +--------------------------- +[ I'm, so, happy ] +--------------------------- + +[float] +=== Configuration + +The `html_strip` character filter accepts the following parameter: + +[horizontal] +`escaped_tags`:: + + An array of HTML tags which should not be stripped from the original text. + +[float] +=== Example configuration + +In this example, we configure the `html_strip` character filter to leave `` +tags in place: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "keyword", + "char_filter": ["my_char_filter"] + } + }, + "char_filter": { + "my_char_filter": { + "type": "html_strip", + "escaped_tags": ["b"] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "
<p>I&apos;m so <b>happy</b>!</p>
" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "\nI'm so happy!\n", + "start_offset": 0, + "end_offset": 32, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: + +[source,text] +--------------------------- +[ \nI'm so happy!\n ] +--------------------------- + + + diff --git a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc index 14c316dcac5..ed90e9f6ab6 100644 --- a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc @@ -1,42 +1,202 @@ [[analysis-mapping-charfilter]] === Mapping Char Filter -A char filter of type `mapping` replacing characters of an analyzed text -with given mapping. +The `mapping` character filter accepts a map of keys and values. Whenever it +encounters a string of characters that is the same as a key, it replaces them +with the value associated with that key. + +Matching is greedy; the longest pattern matching at a given point wins. +Replacements are allowed to be the empty string. + +[float] +=== Configuration + +The `mapping` character filter accepts the following parameters: [horizontal] `mappings`:: - A list of mappings to use. + A array of mappings, with each element having the form `key => value`. `mappings_path`:: - A path, relative to the `config` directory, to a mappings file - configuration. + A path, either absolute or relative to the `config` directory, to a UTF-8 + encoded text mappings file containing a `key => value` mapping per line. -Here is a sample configuration: +Either the `mappings` or `mappings_path` parameter must be provided. + +[float] +=== Example configuration + +In this example, we configure the `mapping` character filter to replace Arabic +numerals with their Latin equivalents: [source,js] --------------------------------------------------- +---------------------------- +PUT my_index { - "index" : { - "analysis" : { - "char_filter" : { - "my_mapping" : { - "type" : "mapping", - "mappings" : [ - "ph => f", - "qu => k" - ] - } - }, - "analyzer" : { - "custom_with_char_filter" : { - "tokenizer" : "standard", - "char_filter" : ["my_mapping"] - } - } + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "keyword", + "char_filter": [ + "my_char_filter" + ] } + }, + "char_filter": { + "my_char_filter": { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2", + "٣ => 3", + "٤ => 4", + "٥ => 5", + "٦ => 6", + "٧ => 7", + "٨ => 8", + "٩ => 9" + ] + } + } } + } } --------------------------------------------------- + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "My license plate is ٢٥٠١٥" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "My license plate is 25015", + "start_offset": 0, + "end_offset": 25, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: + +[source,text] +--------------------------- +[ My license plate is 25015 ] +--------------------------- + +Keys and values can be strings with multiple characters. 
The following +example replaces the `:)` and `:(` emoticons with a text equivalent: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "standard", + "char_filter": [ + "my_char_filter" + ] + } + }, + "char_filter": { + "my_char_filter": { + "type": "mapping", + "mappings": [ + ":) => _happy_", + ":( => _sad_" + ] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "I'm delighted about it :(" +} +---------------------------- +// CONSOLE + + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "I'm", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "delighted", + "start_offset": 4, + "end_offset": 13, + "type": "", + "position": 1 + }, + { + "token": "about", + "start_offset": 14, + "end_offset": 19, + "type": "", + "position": 2 + }, + { + "token": "it", + "start_offset": 20, + "end_offset": 22, + "type": "", + "position": 3 + }, + { + "token": "_sad_", + "start_offset": 23, + "end_offset": 25, + "type": "", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ I'm, delighted, about, it, _sad_ ] +--------------------------- diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc index e3b85fd7bd1..72adefa5aec 100644 --- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc @@ -1,37 +1,249 @@ [[analysis-pattern-replace-charfilter]] === Pattern Replace Char Filter -The `pattern_replace` char filter allows the use of a regex to -manipulate the characters in a string before analysis. The regular -expression is defined using the `pattern` parameter, and the replacement -string can be provided using the `replacement` parameter (supporting -referencing the original text, as explained -http://docs.oracle.com/javase/6/docs/api/java/util/regex/Matcher.html#appendReplacement(java.lang.StringBuffer,%20java.lang.String)[here]). -For more information check the -http://lucene.apache.org/core/4_3_1/analyzers-common/org/apache/lucene/analysis/pattern/PatternReplaceCharFilter.html[lucene -documentation] +The `pattern_replace` character filter uses a regular expression to match +characters which should be replaced with the specified replacement string. +The replacement string can refer to capture groups in the regular expression. -Here is a sample configuration: +[float] +=== Configuration + +The `pattern_replace` character filter accepts the following parameters: + +[horizontal] +`pattern`:: + + A http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expression]. Required. + +`replacement`:: + + The replacement string, which can reference capture groups using the + `$1`..`$9` syntax, as explained + http://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#appendReplacement-java.lang.StringBuffer-java.lang.String-[here]. 
+ +[float] +=== Example configuration + +In this example, we configure the `pattern_replace` character filter to +replace any embedded dashes in numbers with underscores, i.e `123-456-789` -> +`123_456_789`: [source,js] --------------------------------------------------- +---------------------------- +PUT my_index { - "index" : { - "analysis" : { - "char_filter" : { - "my_pattern":{ - "type":"pattern_replace", - "pattern":"sample(.*)", - "replacement":"replacedSample $1" - } - }, - "analyzer" : { - "custom_with_char_filter" : { - "tokenizer" : "standard", - "char_filter" : ["my_pattern"] - } - } + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "standard", + "char_filter": [ + "my_char_filter" + ] } + }, + "char_filter": { + "my_char_filter": { + "type": "pattern_replace", + "pattern": "(\\d+)-(?=\\d)", + "replacement": "$1_" + } + } } + } } --------------------------------------------------- + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "My credit card is 123-456-789" +} +---------------------------- +// CONSOLE +// TEST[skip:Test interprets $1 as a stashed variable] + +The above example produces the following term: + +[source,text] +--------------------------- +[ My, credit, card, is 123_456_789 ] +--------------------------- + + +WARNING: Using a replacement string that changes the length of the original +text will work for search purposes, but will result in incorrect highlighting, +as can be seen in the following example. + +This example inserts a space whenever it encounters a lower-case letter +followed by an upper-case letter (i.e. `fooBarBaz` -> `foo Bar Baz`), allowing +camelCase words to be queried individually: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "standard", + "char_filter": [ + "my_char_filter" + ], + "filter": [ + "lowercase" + ] + } + }, + "char_filter": { + "my_char_filter": { + "type": "pattern_replace", + "pattern": "(?<=\\p{Lower})(?=\\p{Upper})", + "replacement": " " + } + } + } + }, + "mappings": { + "my_type": { + "properties": { + "text": { + "type": "text", + "analyzer": "my_analyzer" + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "The fooBarBaz method" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "foo", + "start_offset": 4, + "end_offset": 6, + "type": "", + "position": 1 + }, + { + "token": "bar", + "start_offset": 7, + "end_offset": 9, + "type": "", + "position": 2 + }, + { + "token": "baz", + "start_offset": 10, + "end_offset": 13, + "type": "", + "position": 3 + }, + { + "token": "method", + "start_offset": 14, + "end_offset": 20, + "type": "", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above returns the following terms: + +[source,js] +---------------------------- +[ the, foo, bar, baz, method ] +---------------------------- + +Querying for `bar` will find the document correctly, but highlighting on the +result will produce incorrect highlights, because our character filter changed +the length of the original text: + +[source,js] +---------------------------- +PUT my_index/my_doc/1?refresh +{ + "text": "The 
fooBarBaz method" +} + +GET my_index/_search +{ + "query": { + "match": { + "text": "bar" + } + }, + "highlight": { + "fields": { + "text": {} + } + } +} +---------------------------- +// CONSOLE +// TEST[continued] + +The output from the above is: + +[source,js] +---------------------------- +{ + "timed_out": false, + "took": $body.took, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.4375, + "hits": [ + { + "_index": "my_index", + "_type": "my_doc", + "_id": "1", + "_score": 0.4375, + "_source": { + "text": "The fooBarBaz method" + }, + "highlight": { + "text": [ + "The fooBarBaz method" <1> + ] + } + } + ] + } +} +---------------------------- +// TESTRESPONSE[s/"took".*/"took": "$body.took",/] +<1> Note the incorrect highlight. diff --git a/docs/reference/analysis/tokenizers.asciidoc b/docs/reference/analysis/tokenizers.asciidoc index 46c02f9a4fc..b30822b6a0b 100644 --- a/docs/reference/analysis/tokenizers.asciidoc +++ b/docs/reference/analysis/tokenizers.asciidoc @@ -1,34 +1,136 @@ [[analysis-tokenizers]] == Tokenizers -Tokenizers are used to break a string down into a stream of terms -or tokens. A simple tokenizer might split the string up into terms -wherever it encounters whitespace or punctuation. +A _tokenizer_ receives a stream of characters, breaks it up into individual +_tokens_ (usually individual words), and outputs a stream of _tokens_. For +instance, a <> tokenizer breaks +text into tokens whenever it sees any whitespace. It would convert the text +`"Quick brown fox!"` into the terms `[Quick, brown, fox!]`. + +The tokenizer is also responsible for recording the order or _position_ of +each term (used for phrase and word proximity queries) and the start and end +_character offsets_ of the original word which the term represents (used for +highlighting search snippets). + +Elasticsearch has a number of built in tokenizers which can be used to build +<>. + +[float] +=== Word Oriented Tokenizers + +The following tokenizers are usually used for tokenizing full text into +individual words: + +<>:: + +The `standard` tokenizer divides text into terms on word boundaries, as +defined by the Unicode Text Segmentation algorithm. It removes most +punctuation symbols. It is the best choice for most languages. + +<>:: + +The `letter` tokenizer divides text into terms whenever it encounters a +character which is not a letter. + +<>:: + +The `lowercase` tokenizer, like the `letter` tokenizer, divides text into +terms whenever it encounters a character which is not a letter, but it also +lowercases all terms. + +<>:: + +The `whitespace` tokenizer divides text into terms whenever it encounters any +whitespace character. + +<>:: + +The `uax_url_email` tokenizer is like the `standard` tokenizer except that it +recognises URLs and email addresses as single tokens. + +<>:: + +The `classic` tokenizer is a grammar based tokenizer for the English Language. + +<>:: + +The `thai` tokenizer segments Thai text into words. + +[float] +=== Partial Word Tokenizers + +These tokenizers break up text or words into small fragments, for partial word +matching: + +<>:: + +The `ngram` tokenizer can break up text into words when it encounters any of +a list of specified characters (e.g. whitespace or punctuation), then it returns +n-grams of each word: a sliding window of continuous letters, e.g. `quick` -> +`[qu, ui, ic, ck]`. 
+ +<>:: + +The `edge_ngram` tokenizer can break up text into words when it encounters any of +a list of specified characters (e.g. whitespace or punctuation), then it returns +n-grams of each word which are anchored to the start of the word, e.g. `quick` -> +`[q, qu, qui, quic, quick]`. + + +[float] +=== Structured Text Tokenizers + +The following tokenizers are usually used with structured text like +identifiers, email addresses, zip codes, and paths, rather than with full +text: + +<>:: + +The `keyword` tokenizer is a ``noop'' tokenizer that accepts whatever text it +is given and outputs the exact same text as a single term. It can be combined +with token filters like <> to +normalise the analysed terms. + +<>:: + +The `pattern` tokenizer uses a regular expression to either split text into +terms whenever it matches a word separator, or to capture matching text as +terms. + +<>:: + +The `path_hierarchy` tokenizer takes a hierarchical value like a filesystem +path, splits on the path separator, and emits a term for each component in the +tree, e.g. `/foo/bar/baz` -> `[/foo, /foo/bar, /foo/bar/baz ]`. + + + -Elasticsearch has a number of built in tokenizers which can be -used to build <>. include::tokenizers/standard-tokenizer.asciidoc[] -include::tokenizers/edgengram-tokenizer.asciidoc[] - -include::tokenizers/keyword-tokenizer.asciidoc[] - include::tokenizers/letter-tokenizer.asciidoc[] include::tokenizers/lowercase-tokenizer.asciidoc[] -include::tokenizers/ngram-tokenizer.asciidoc[] - include::tokenizers/whitespace-tokenizer.asciidoc[] -include::tokenizers/pattern-tokenizer.asciidoc[] - include::tokenizers/uaxurlemail-tokenizer.asciidoc[] -include::tokenizers/pathhierarchy-tokenizer.asciidoc[] - include::tokenizers/classic-tokenizer.asciidoc[] include::tokenizers/thai-tokenizer.asciidoc[] + +include::tokenizers/ngram-tokenizer.asciidoc[] + +include::tokenizers/edgengram-tokenizer.asciidoc[] + + +include::tokenizers/keyword-tokenizer.asciidoc[] + +include::tokenizers/pattern-tokenizer.asciidoc[] + +include::tokenizers/pathhierarchy-tokenizer.asciidoc[] + + diff --git a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc index 9b6315cec96..45d4ad41526 100644 --- a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc @@ -1,19 +1,269 @@ [[analysis-classic-tokenizer]] === Classic Tokenizer -A tokenizer of type `classic` providing grammar based tokenizer that is -a good tokenizer for English language documents. This tokenizer has -heuristics for special treatment of acronyms, company names, email addresses, -and internet host names. However, these rules don't always work, and -the tokenizer doesn't work well for most languages other than English. +The `classic` tokenizer is a grammar based tokenizer that is good for English +language documents. This tokenizer has heuristics for special treatment of +acronyms, company names, email addresses, and internet host names. However, +these rules don't always work, and the tokenizer doesn't work well for most +languages other than English: + +* It splits words at most punctuation characters, removing punctuation. However, a + dot that's not followed by whitespace is considered part of a token. + +* It splits words at hyphens, unless there's a number in the token, in which case + the whole token is interpreted as a product number and is not split. 
+ +* It recognizes email addresses and internet hostnames as one token. + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "classic", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ] +--------------------------- + +[float] +=== Configuration + +The `classic` tokenizer accepts the following parameters: + +[horizontal] +`max_token_length`:: + + The maximum token length. If a token is seen that exceeds this length then + it is split at `max_token_length` intervals. Defaults to `255`. + +[float] +=== Example configuration + +In this example, we configure the `classic` tokenizer to have a +`max_token_length` of 5 (for demonstration purposes): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "classic", + "max_token_length": 5 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ] +--------------------------- -The following are settings that can be set for a `classic` tokenizer -type: -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`max_token_length` |The maximum token length. If a token is seen that -exceeds this length then it is discarded. Defaults to `255`. -|======================================================================= diff --git a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc index 41cc2337940..2328354998e 100644 --- a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc @@ -1,80 +1,323 @@ [[analysis-edgengram-tokenizer]] === Edge NGram Tokenizer -A tokenizer of type `edgeNGram`. +The `edge_ngram` tokenizer first breaks text down into words whenever it +encounters one of a list of specified characters, then it emits +https://en.wikipedia.org/wiki/N-gram[N-grams] of each word where the start of +the N-gram is anchored to the beginning of the word. -This tokenizer is very similar to `nGram` but only keeps n-grams which -start at the beginning of a token. +Edge N-Grams are useful for _search-as-you-type_ queries. -The following are settings that can be set for a `edgeNGram` tokenizer -type: +TIP: When you need _search-as-you-type_ for text which has a widely known +order, such as movie or song titles, the +<> is a much more efficient +choice than edge N-grams. Edge N-grams have the advantage when trying to +autocomplete words that can appear in any order. -[cols="<,<,<",options="header",] -|======================================================================= -|Setting |Description |Default value -|`min_gram` |Minimum size in codepoints of a single n-gram |`1`. +[float] +=== Example output -|`max_gram` |Maximum size in codepoints of a single n-gram |`2`. 
+With the default settings, the `edge_ngram` tokenizer treats the initial text as a +single token and produces N-grams with minimum length `1` and maximum length +`2`: -|`token_chars` | Characters classes to keep in the -tokens, Elasticsearch will split on characters that don't belong to any -of these classes. |`[]` (Keep all characters) -|======================================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "edge_ngram", + "text": "Quick Fox" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Q", + "start_offset": 0, + "end_offset": 1, + "type": "word", + "position": 0 + }, + { + "token": "Qu", + "start_offset": 0, + "end_offset": 2, + "type": "word", + "position": 1 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// -`token_chars` accepts the following character classes: +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Q, Qu ] +--------------------------- + +NOTE: These default gram lengths are almost entirely useless. You need to +configure the `edge_ngram` before using it. + +[float] +=== Configuration + +The `edge_ngram` tokenizer accepts the following parameters: [horizontal] -`letter`:: for example `a`, `b`, `ï` or `京` -`digit`:: for example `3` or `7` -`whitespace`:: for example `" "` or `"\n"` -`punctuation`:: for example `!` or `"` -`symbol`:: for example `$` or `√` +`min_gram`:: + Minimum length of characters in a gram. Defaults to `1`. + +`max_gram`:: + Maximum length of characters in a gram. Defaults to `2`. + +`token_chars`:: + + Character classes that should be included in a token. Elasticsearch + will split on characters that don't belong to the classes specified. + Defaults to `[]` (keep all characters). ++ +Character classes may be any of the following: ++ +* `letter` -- for example `a`, `b`, `ï` or `京` +* `digit` -- for example `3` or `7` +* `whitespace` -- for example `" "` or `"\n"` +* `punctuation` -- for example `!` or `"` +* `symbol` -- for example `$` or `√` [float] -==== Example +=== Example configuration + +In this example, we configure the `edge_ngram` tokenizer to treat letters and +digits as tokens, and to produce grams with minimum length `2` and maximum +length `10`: [source,js] --------------------------------------------------- - curl -XPUT 'localhost:9200/test' -d ' - { - "settings" : { - "analysis" : { - "analyzer" : { - "my_edge_ngram_analyzer" : { - "tokenizer" : "my_edge_ngram_tokenizer" - } - }, - "tokenizer" : { - "my_edge_ngram_tokenizer" : { - "type" : "edgeNGram", - "min_gram" : "2", - "max_gram" : "5", - "token_chars": [ "letter", "digit" ] - } - } - } +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" } - }' + }, + "tokenizer": { + "my_tokenizer": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 10, + "token_chars": [ + "letter", + "digit" + ] + } + } + } + } +} - curl 'localhost:9200/test/_analyze?pretty=1&analyzer=my_edge_ngram_analyzer' -d 'FC Schalke 04' - # FC, Sc, Sch, Scha, Schal, 04 --------------------------------------------------- +GET _cluster/health?wait_for_status=yellow -[float] -==== `side` deprecated +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "2 Quick Foxes." 
+} +---------------------------- +// CONSOLE -There used to be a `side` parameter up to `0.90.1` but it is now deprecated. In -order to emulate the behavior of `"side" : "BACK"` a -<> should be used together -with the <>. The -`edgeNGram` filter must be enclosed in `reverse` filters like this: +///////////////////// [source,js] --------------------------------------------------- - "filter" : ["reverse", "edgeNGram", "reverse"] --------------------------------------------------- +---------------------------- +{ + "tokens": [ + { + "token": "Qu", + "start_offset": 2, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "Qui", + "start_offset": 2, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "Quic", + "start_offset": 2, + "end_offset": 6, + "type": "word", + "position": 2 + }, + { + "token": "Quick", + "start_offset": 2, + "end_offset": 7, + "type": "word", + "position": 3 + }, + { + "token": "Fo", + "start_offset": 8, + "end_offset": 10, + "type": "word", + "position": 4 + }, + { + "token": "Fox", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 5 + }, + { + "token": "Foxe", + "start_offset": 8, + "end_offset": 12, + "type": "word", + "position": 6 + }, + { + "token": "Foxes", + "start_offset": 8, + "end_offset": 13, + "type": "word", + "position": 7 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above example produces the following terms: + +[source,text] +--------------------------- +[ Qu, Qui, Quic, Quick, Fo, Fox, Foxe, Foxes ] +--------------------------- + +Usually we recommend using the same `analyzer` at index time and at search +time. In the case of the `edge_ngram` tokenizer, the advice is different. It +only makes sense to use the `edge_ngram` tokenizer at index time, to ensure +that partial words are available for matching in the index. At search time, +just search for the terms the user has typed in, for instance: `Quick Fo`. + +Below is an example of how to set up a field for _search-as-you-type_: + +[source,js] +----------------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "autocomplete": { + "tokenizer": "autocomplete", + "filter": [ + "lowercase" + ] + }, + "autocomplete_search": { + "tokenizer": "lowercase" + } + }, + "tokenizer": { + "autocomplete": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 10, + "token_chars": [ + "letter" + ] + } + } + } + }, + "mappings": { + "doc": { + "properties": { + "title": { + "type": "text", + "analyzer": "autocomplete", + "search_analyzer": "autocomplete_search" + } + } + } + } +} + +PUT my_index/doc/1 +{ + "title": "Quick Foxes" <1> +} + +POST my_index/_refresh + +GET my_index/_search +{ + "query": { + "match": { + "title": { + "query": "Quick Fo", <2> + "operator": "and" + } + } + } +} +----------------------------------- +// CONSOLE + +<1> The `autocomplete` analyzer indexes the terms `[qu, qui, quic, quick, fo, fox, foxe, foxes]`. +<2> The `autocomplete_search` analyzer searches for the terms `[quick, fo]`, both of which appear in the index. 
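An easy way to see why the two analyzers differ is to run the query text through each of them with the `_analyze` API. The following is only a sketch that reuses the `my_index` definition above, with the resulting terms described in prose rather than as a full response:

[source,js]
----------------------------
POST my_index/_analyze
{
  "analyzer": "autocomplete_search",
  "text": "Quick Fo"
}

POST my_index/_analyze
{
  "analyzer": "autocomplete",
  "text": "Quick Fo"
}
----------------------------

The first request should return exactly the terms the user typed, `[ quick, fo ]`, while the second would expand the same text to `[ qu, qui, quic, quick, fo ]`, which is only useful at index time.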
+ +///////////////////// + +[source,js] +---------------------------- +{ + "took": $body.took, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.44194174, + "hits": [ + { + "_index": "my_index", + "_type": "doc", + "_id": "1", + "_score": 0.44194174, + "_source": { + "title": "Quick Foxes" + } + } + ] + } +} +---------------------------- +// TESTRESPONSE[s/"took".*/"took": "$body.took",/] +///////////////////// -which essentially reverses the token, builds front `EdgeNGrams` and reverses -the ngram again. This has the same effect as the previous `"side" : "BACK"` setting. diff --git a/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc index ad1652466be..27515516fe5 100644 --- a/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc @@ -1,15 +1,60 @@ [[analysis-keyword-tokenizer]] === Keyword Tokenizer -A tokenizer of type `keyword` that emits the entire input as a single -output. +The `keyword` tokenizer is a ``noop'' tokenizer that accepts whatever text it +is given and outputs the exact same text as a single term. It can be combined +with token filters to normalise output, e.g. lower-casing email addresses. -The following are settings that can be set for a `keyword` tokenizer -type: +[float] +=== Example output -[cols="<,<",options="header",] -|======================================================= -|Setting |Description -|`buffer_size` |The term buffer size. Defaults to `256`. -|======================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "keyword", + "text": "New York" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "New York", + "start_offset": 0, + "end_offset": 8, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following term: + +[source,text] +--------------------------- +[ New York ] +--------------------------- + +[float] +=== Configuration + +The `keyword` tokenizer accepts the following parameters: + +[horizontal] +`buffer_size`:: + + The number of characters read into the term buffer in a single pass. + Defaults to `256`. The term buffer will grow by this size until all the + text has been consumed. It is advisable not to change this setting. diff --git a/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc index 03025ccd303..7423a68732d 100644 --- a/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc @@ -1,7 +1,123 @@ [[analysis-letter-tokenizer]] === Letter Tokenizer -A tokenizer of type `letter` that divides text at non-letters. That's to -say, it defines tokens as maximal strings of adjacent letters. Note, -this does a decent job for most European languages, but does a terrible -job for some Asian languages, where words are not separated by spaces. +The `letter` tokenizer breaks text into terms whenever it encounters a +character which is not a letter. It does a reasonable job for most European +languages, but does a terrible job for some Asian languages, where words are +not separated by spaces. 
+ +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "letter", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, QUICK, Brown, Foxes, jumped, over, the, lazy, dog, s, bone ] +--------------------------- + +[float] +=== Configuration + +The `letter` tokenizer is not configurable. diff --git a/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc index 0cdbbc387a4..5aad28b4394 100644 --- a/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc @@ -1,15 +1,128 @@ [[analysis-lowercase-tokenizer]] === Lowercase Tokenizer -A tokenizer of type `lowercase` that performs the function of -<> and -<> together. It divides text at non-letters and converts -them to lower case. While it is functionally equivalent to the -combination of -<> and -<>, there is a performance advantage to doing the two -tasks at once, hence this (redundant) implementation. + +The `lowercase` toknenizer, like the +<> breaks text into terms +whenever it encounters a character which is not a letter, but it also +lowecases all terms. It is functionally equivalent to the +<> combined with the +<>, but is more +efficient as it performs both steps in a single pass. + + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "lowercase", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ the, quick, brown, foxes, jumped, over, the, lazy, dog, s, bone ] +--------------------------- + +[float] +=== Configuration + +The `lowercase` tokenizer is not configurable. diff --git a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc index 23e6bc52dda..cf45da0627e 100644 --- a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc @@ -1,57 +1,306 @@ [[analysis-ngram-tokenizer]] === NGram Tokenizer -A tokenizer of type `nGram`. +The `ngram` tokenizer first breaks text down into words whenever it encounters +one of a list of specified characters, then it emits +https://en.wikipedia.org/wiki/N-gram[N-grams] of each word of the specified +length. -The following are settings that can be set for a `nGram` tokenizer type: - -[cols="<,<,<",options="header",] -|======================================================================= -|Setting |Description |Default value -|`min_gram` |Minimum size in codepoints of a single n-gram |`1`. - -|`max_gram` |Maximum size in codepoints of a single n-gram |`2`. - -|`token_chars` |Characters classes to keep in the -tokens, Elasticsearch will split on characters that don't belong to any -of these classes. |`[]` (Keep all characters) -|======================================================================= - -`token_chars` accepts the following character classes: - -[horizontal] -`letter`:: for example `a`, `b`, `ï` or `京` -`digit`:: for example `3` or `7` -`whitespace`:: for example `" "` or `"\n"` -`punctuation`:: for example `!` or `"` -`symbol`:: for example `$` or `√` +N-grams are like a sliding window that moves across the word - a continuous +sequence of characters of the specified length. They are useful for querying +languages that don't use spaces or that have long compound words, like German. 
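For instance, assuming a tri-gram configuration like the one shown in the example configuration below, the standalone word `Versicherung` and the compound `Lebensversicherung` share almost all of their grams, so a query for one can match documents containing the other (add a `lowercase` token filter if the match should also be case-insensitive):

[source,text]
---------------------------
Versicherung       -> [ Ver, ers, rsi, sic, ich, che, her, eru, run, ung ]
Lebensversicherung -> [ Leb, ebe, ben, ens, nsv, sve, ver, ers, rsi, sic, ich, che, her, eru, run, ung ]
---------------------------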
[float] -==== Example +=== Example output + +With the default settings, the `ngram` tokenizer treats the initial text as a +single token and produces N-grams with minimum length `1` and maximum length +`2`: [source,js] --------------------------------------------------- - curl -XPUT 'localhost:9200/test' -d ' - { - "settings" : { - "analysis" : { - "analyzer" : { - "my_ngram_analyzer" : { - "tokenizer" : "my_ngram_tokenizer" - } - }, - "tokenizer" : { - "my_ngram_tokenizer" : { - "type" : "nGram", - "min_gram" : "2", - "max_gram" : "3", - "token_chars": [ "letter", "digit" ] - } - } - } - } - }' +--------------------------- +POST _analyze +{ + "tokenizer": "ngram", + "text": "Quick Fox" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Q", + "start_offset": 0, + "end_offset": 1, + "type": "word", + "position": 0 + }, + { + "token": "Qu", + "start_offset": 0, + "end_offset": 2, + "type": "word", + "position": 1 + }, + { + "token": "u", + "start_offset": 1, + "end_offset": 2, + "type": "word", + "position": 2 + }, + { + "token": "ui", + "start_offset": 1, + "end_offset": 3, + "type": "word", + "position": 3 + }, + { + "token": "i", + "start_offset": 2, + "end_offset": 3, + "type": "word", + "position": 4 + }, + { + "token": "ic", + "start_offset": 2, + "end_offset": 4, + "type": "word", + "position": 5 + }, + { + "token": "c", + "start_offset": 3, + "end_offset": 4, + "type": "word", + "position": 6 + }, + { + "token": "ck", + "start_offset": 3, + "end_offset": 5, + "type": "word", + "position": 7 + }, + { + "token": "k", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 8 + }, + { + "token": "k ", + "start_offset": 4, + "end_offset": 6, + "type": "word", + "position": 9 + }, + { + "token": " ", + "start_offset": 5, + "end_offset": 6, + "type": "word", + "position": 10 + }, + { + "token": " F", + "start_offset": 5, + "end_offset": 7, + "type": "word", + "position": 11 + }, + { + "token": "F", + "start_offset": 6, + "end_offset": 7, + "type": "word", + "position": 12 + }, + { + "token": "Fo", + "start_offset": 6, + "end_offset": 8, + "type": "word", + "position": 13 + }, + { + "token": "o", + "start_offset": 7, + "end_offset": 8, + "type": "word", + "position": 14 + }, + { + "token": "ox", + "start_offset": 7, + "end_offset": 9, + "type": "word", + "position": 15 + }, + { + "token": "x", + "start_offset": 8, + "end_offset": 9, + "type": "word", + "position": 16 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Q, Qu, u, ui, i, ic, c, ck, k, "k ", " ", " F", F, Fo, o, ox, x ] +--------------------------- + +[float] +=== Configuration + +The `ngram` tokenizer accepts the following parameters: + +[horizontal] +`min_gram`:: + Minimum length of characters in a gram. Defaults to `1`. + +`max_gram`:: + Maximum length of characters in a gram. Defaults to `2`. + +`token_chars`:: + + Character classes that should be included in a token. Elasticsearch + will split on characters that don't belong to the classes specified. + Defaults to `[]` (keep all characters). 
++ +Character classes may be any of the following: ++ +* `letter` -- for example `a`, `b`, `ï` or `京` +* `digit` -- for example `3` or `7` +* `whitespace` -- for example `" "` or `"\n"` +* `punctuation` -- for example `!` or `"` +* `symbol` -- for example `$` or `√` + +TIP: It usually makes sense to set `min_gram` and `max_gram` to the same +value. The smaller the length, the more documents will match but the lower +the quality of the matches. The longer the length, the more specific the +matches. A tri-gram (length `3`) is a good place to start. + +[float] +=== Example configuration + +In this example, we configure the `ngram` tokenizer to treat letters and +digits as tokens, and to produce tri-grams (grams of length `3`): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "ngram", + "min_gram": 3, + "max_gram": 3, + "token_chars": [ + "letter", + "digit" + ] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "2 Quick Foxes." +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Qui", + "start_offset": 2, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "uic", + "start_offset": 3, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ick", + "start_offset": 4, + "end_offset": 7, + "type": "word", + "position": 2 + }, + { + "token": "Fox", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 3 + }, + { + "token": "oxe", + "start_offset": 9, + "end_offset": 12, + "type": "word", + "position": 4 + }, + { + "token": "xes", + "start_offset": 10, + "end_offset": 13, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ Qui, uic, ick, Fox, oxe, xes ] +--------------------------- + - curl 'localhost:9200/test/_analyze?pretty=1&analyzer=my_ngram_analyzer' -d 'FC Schalke 04' - # FC, Sc, Sch, ch, cha, ha, hal, al, alk, lk, lke, ke, 04 --------------------------------------------------- diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index e6876f55bc6..b656e67eaec 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -1,32 +1,175 @@ [[analysis-pathhierarchy-tokenizer]] === Path Hierarchy Tokenizer -The `path_hierarchy` tokenizer takes something like this: +The `path_hierarchy` tokenizer takes a hierarchical value like a filesystem +path, splits on the path separator, and emits a term for each component in the +tree. 
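Because every ancestor path is emitted as its own term, a field analyzed with `path_hierarchy` at index time lets a query for a parent directory match every document stored below it. The following is only a minimal sketch: the index name `my_files`, the type `file`, and the field and analyzer names are assumptions, and `term` queries are not analyzed, so the query path is matched verbatim against the indexed terms:

[source,js]
----------------------------
PUT my_files
{
  "settings": {
    "analysis": {
      "analyzer": {
        "paths": {
          "tokenizer": "path_hierarchy"
        }
      }
    }
  },
  "mappings": {
    "file": {
      "properties": {
        "path": {
          "type": "text",
          "analyzer": "paths"
        }
      }
    }
  }
}
----------------------------

A document indexed with `"path": "/one/two/three"` would then be found by `{ "term": { "path": "/one/two" } }`.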
-------------------------- -/something/something/else -------------------------- +[float] +=== Example output -And produces tokens: +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "path_hierarchy", + "text": "/one/two/three" +} +--------------------------- +// CONSOLE -------------------------- -/something -/something/something -/something/something/else -------------------------- +///////////////////// -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`delimiter` |The character delimiter to use, defaults to `/`. +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "/one", + "start_offset": 0, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "/one/two", + "start_offset": 0, + "end_offset": 8, + "type": "word", + "position": 0 + }, + { + "token": "/one/two/three", + "start_offset": 0, + "end_offset": 14, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE -|`replacement` |An optional replacement character to use. Defaults to -the `delimiter`. +///////////////////// -|`buffer_size` |The buffer size to use, defaults to `1024`. -|`reverse` |Generates tokens in reverse order, defaults to `false`. -|`skip` |Controls initial tokens to skip, defaults to `0`. -|======================================================================= +The above text would produce the following terms: + +[source,text] +--------------------------- +[ /one, /one/two, /one/two/three ] +--------------------------- + +[float] +=== Configuration + +The `path_hierarchy` tokenizer accepts the following parameters: + +[horizontal] +`delimiter`:: + The character to use as the path separator. Defaults to `/`. + +`replacement`:: + An optional replacement character to use for the delimiter. + Defaults to the `delimiter`. + +`buffer_size`:: + The number of characters read into the term buffer in a single pass. + Defaults to `1024`. The term buffer will grow by this size until all the + text has been consumed. It is advisable not to change this setting. + +`reverse`:: + If set to `true`, emits the tokens in reverse order. Defaults to `false`. + +`skip`:: + The number of initial tokens to skip. Defaults to `0`. + +[float] +=== Example configuration + +In this example, we configure the `path_hierarchy` tokenizer to split on `-` +characters, and to replace them with `/`. 
The first two tokens are skipped: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "path_hierarchy", + "delimiter": "-", + "replacement": "/", + "skip": 2 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "one-two-three-four-five" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "/three", + "start_offset": 7, + "end_offset": 13, + "type": "word", + "position": 0 + }, + { + "token": "/three/four", + "start_offset": 7, + "end_offset": 18, + "type": "word", + "position": 0 + }, + { + "token": "/three/four/five", + "start_offset": 7, + "end_offset": 23, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ /three, /three/four, /three/four/five ] +--------------------------- + +If we were to set `reverse` to `true`, it would produce the following: + +[source,text] +--------------------------- +[ one/two/three/, two/three/, three/ ] +--------------------------- diff --git a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc index 9a148456195..ca902a4e5f2 100644 --- a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc @@ -1,38 +1,268 @@ [[analysis-pattern-tokenizer]] === Pattern Tokenizer -A tokenizer of type `pattern` that can flexibly separate text into terms -via a regular expression. Accepts the following settings: +The `pattern` tokenizer uses a regular expression to either split text into +terms whenever it matches a word separator, or to capture matching text as +terms. -[cols="<,<",options="header",] -|====================================================================== -|Setting |Description -|`pattern` |The regular expression pattern, defaults to `\W+`. -|`flags` |The regular expression flags. -|`group` |Which group to extract into tokens. Defaults to `-1` (split). -|====================================================================== +The default pattern is `\W+`, which splits text whenever it encounters +non-word characters. -*IMPORTANT*: The regular expression should match the *token separators*, -not the tokens themselves. +[float] +=== Example output -********************************************* -Note that you may need to escape `pattern` string literal according to -your client language rules. For example, in many programming languages -a string literal for `\W+` pattern is written as `"\\W+"`. -There is nothing special about `pattern` (you may have to escape other -string literals as well); escaping `pattern` is common just because it -often contains characters that should be escaped. -********************************************* +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "pattern", + "text": "The foo_bar_size's default is 5." +} +--------------------------- +// CONSOLE -`group` set to `-1` (the default) is equivalent to "split". Using group ->= 0 selects the matching group as the token. 
For example, if you have: +///////////////////// ------------------------- -pattern = '([^']+)' -group = 0 -input = aaa 'bbb' 'ccc' ------------------------- +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "foo_bar_size", + "start_offset": 4, + "end_offset": 16, + "type": "word", + "position": 1 + }, + { + "token": "s", + "start_offset": 17, + "end_offset": 18, + "type": "word", + "position": 2 + }, + { + "token": "default", + "start_offset": 19, + "end_offset": 26, + "type": "word", + "position": 3 + }, + { + "token": "is", + "start_offset": 27, + "end_offset": 29, + "type": "word", + "position": 4 + }, + { + "token": "5", + "start_offset": 30, + "end_offset": 31, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE -the output will be two tokens: `'bbb'` and `'ccc'` (including the `'` -marks). With the same input but using group=1, the output would be: -`bbb` and `ccc` (no `'` marks). +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, foo_bar_size, s, default, is, 5 ] +--------------------------- + +[float] +=== Configuration + +The `pattern` tokenizer accepts the following parameters: + +[horizontal] +`pattern`:: + + A http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expression], defaults to `\W+`. + +`flags`:: + + Java regular expression http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html#field.summary[flags]. + lags should be pipe-separated, eg `"CASE_INSENSITIVE|COMMENTS"`. + +`group`:: + + Which capture group to extract as tokens. Defaults to `-1` (split). + +[float] +=== Example configuration + +In this example, we configure the `pattern` tokenizer to break text into +tokens when it encounters commas: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "pattern", + "pattern": "," + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "comma,separated,values" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "comma", + "start_offset": 0, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "separated", + "start_offset": 6, + "end_offset": 15, + "type": "word", + "position": 1 + }, + { + "token": "values", + "start_offset": 16, + "end_offset": 22, + "type": "word", + "position": 2 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ comma, separated, values ] +--------------------------- + +In the next example, we configure the `pattern` tokenizer to capture values +enclosed in double quotes (ignoring embedded escaped quotes `\"`). 
The regex +itself looks like this: + + "((?:\\"|[^"]|\\")*)" + +And reads as follows: + +* A literal `"` +* Start capturing: +** A literal `\"` OR any character except `"` +** Repeat until no more characters match +* A literal closing `"` + +When the pattern is specified in JSON, the `"` and `\` characters need to be +escaped, so the pattern ends up looking like: + + \"((?:\\\\\"|[^\"]|\\\\\")+)\" + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "pattern", + "pattern": "\"((?:\\\\\"|[^\"]|\\\\\")+)\"", + "group": 1 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "\"value\", \"value with embedded \\\" quote\"" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "value", + "start_offset": 1, + "end_offset": 6, + "type": "word", + "position": 0 + }, + { + "token": "value with embedded \\\" quote", + "start_offset": 10, + "end_offset": 38, + "type": "word", + "position": 1 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above example produces the following two terms: + +[source,text] +--------------------------- +[ value, value with embedded \" quote ] +--------------------------- diff --git a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc index 42dbe5a864a..ee052529b43 100644 --- a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc @@ -1,18 +1,274 @@ [[analysis-standard-tokenizer]] === Standard Tokenizer -A tokenizer of type `standard` providing grammar based tokenizer that is -a good tokenizer for most European language documents. The tokenizer -implements the Unicode Text Segmentation algorithm, as specified in -http://unicode.org/reports/tr29/[Unicode Standard Annex #29]. +The `standard` tokenizer provides grammar based tokenization (based on the +Unicode Text Segmentation algorithm, as specified in +http://unicode.org/reports/tr29/[Unicode Standard Annex #29]) and works well +for most languages. + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "standard", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ] +--------------------------- + +[float] +=== Configuration + +The `standard` tokenizer accepts the following parameters: + +[horizontal] +`max_token_length`:: + + The maximum token length. If a token is seen that exceeds this length then + it is split at `max_token_length` intervals. Defaults to `255`. + +[float] +=== Example configuration + +In this example, we configure the `standard` tokenizer to have a +`max_token_length` of 5 (for demonstration purposes): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "standard", + "max_token_length": 5 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumpe", + "start_offset": 24, + "end_offset": 29, + "type": "", + "position": 5 + }, + { + "token": "d", + "start_offset": 29, + "end_offset": 30, + "type": "", + "position": 6 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 7 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 8 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 9 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 10 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ] +--------------------------- -The following are settings that can be set for a `standard` tokenizer -type: -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`max_token_length` |The maximum token length. If a token is seen that -exceeds this length then it is split at `max_token_length` intervals. Defaults to `255`. -|======================================================================= diff --git a/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc index 06f0b6892e7..3e9904d116e 100644 --- a/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc @@ -1,7 +1,106 @@ [[analysis-thai-tokenizer]] === Thai Tokenizer -A tokenizer of type `thai` that segments Thai text into words. This tokenizer -uses the built-in Thai segmentation algorithm included with Java to divide -up Thai text. Text in other languages in general will be treated the same -as `standard`. +The `thai` tokenizer segments Thai text into words, using the Thai +segmentation algorithm included with Java. Text in other languages in general +will be treated the same as the +<>. + +WARNING: This tokenizer may not be supported by all JREs. It is known to work +with Sun/Oracle and OpenJDK. If your application needs to be fully portable, +consider using the {plugins}/analysis-icu-tokenizer.html[ICU Tokenizer] instead. 
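Like any built-in tokenizer, it can also be referenced by name from a custom analyzer, for example together with the predefined `_thai_` stop word list. This is only a sketch; the index, analyzer and filter names are assumptions:

[source,js]
----------------------------
PUT my_index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_thai_analyzer": {
          "tokenizer": "thai",
          "filter": [ "lowercase", "thai_stop" ]
        }
      },
      "filter": {
        "thai_stop": {
          "type": "stop",
          "stopwords": "_thai_"
        }
      }
    }
  }
}
----------------------------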
+ +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "thai", + "text": "การที่ได้ต้องแสดงว่างานดี" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "การ", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "ที่", + "start_offset": 3, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ได้", + "start_offset": 6, + "end_offset": 9, + "type": "word", + "position": 2 + }, + { + "token": "ต้อง", + "start_offset": 9, + "end_offset": 13, + "type": "word", + "position": 3 + }, + { + "token": "แสดง", + "start_offset": 13, + "end_offset": 17, + "type": "word", + "position": 4 + }, + { + "token": "ว่า", + "start_offset": 17, + "end_offset": 20, + "type": "word", + "position": 5 + }, + { + "token": "งาน", + "start_offset": 20, + "end_offset": 23, + "type": "word", + "position": 6 + }, + { + "token": "ดี", + "start_offset": 23, + "end_offset": 25, + "type": "word", + "position": 7 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ การ, ที่, ได้, ต้อง, แสดง, ว่า, งาน, ดี ] +--------------------------- + +[float] +=== Configuration + +The `thai` tokenizer is not configurable. diff --git a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc index 9ed28e60b91..500a5e191f1 100644 --- a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc @@ -1,16 +1,199 @@ [[analysis-uaxurlemail-tokenizer]] -=== UAX Email URL Tokenizer +=== UAX URL Email Tokenizer -A tokenizer of type `uax_url_email` which works exactly like the -`standard` tokenizer, but tokenizes emails and urls as single tokens. +The `uax_url_email` tokenizer is like the <> except that it +recognises URLs and email addresses as single tokens. -The following are settings that can be set for a `uax_url_email` -tokenizer type: +[float] +=== Example output -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`max_token_length` |The maximum token length. If a token is seen that -exceeds this length then it is discarded. Defaults to `255`. 
-|======================================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "uax_url_email", + "text": "Email me at john.smith@global-international.com" +} +--------------------------- +// CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Email", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "me", + "start_offset": 6, + "end_offset": 8, + "type": "", + "position": 1 + }, + { + "token": "at", + "start_offset": 9, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "john.smith@global-international.com", + "start_offset": 12, + "end_offset": 47, + "type": "", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Email, me, at, john.smith@global-international.com ] +--------------------------- + +while the `standard` tokenizer would produce: + +[source,text] +--------------------------- +[ Email, me, at, john.smith, global, international.com ] +--------------------------- + +[float] +=== Configuration + +The `uax_url_email` tokenizer accepts the following parameters: + +[horizontal] +`max_token_length`:: + + The maximum token length. If a token is seen that exceeds this length then + it is split at `max_token_length` intervals. Defaults to `255`. + +[float] +=== Example configuration + +In this example, we configure the `uax_url_email` tokenizer to have a +`max_token_length` of 5 (for demonstration purposes): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "uax_url_email", + "max_token_length": 5 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "john.smith@global-international.com" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "john", + "start_offset": 0, + "end_offset": 4, + "type": "", + "position": 0 + }, + { + "token": "smith", + "start_offset": 5, + "end_offset": 10, + "type": "", + "position": 1 + }, + { + "token": "globa", + "start_offset": 11, + "end_offset": 16, + "type": "", + "position": 2 + }, + { + "token": "l", + "start_offset": 16, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "inter", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "natio", + "start_offset": 23, + "end_offset": 28, + "type": "", + "position": 5 + }, + { + "token": "nal.c", + "start_offset": 28, + "end_offset": 33, + "type": "", + "position": 6 + }, + { + "token": "om", + "start_offset": 33, + "end_offset": 35, + "type": "", + "position": 7 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ john, smith, globa, l, inter, natio, nal.c, om ] +--------------------------- diff --git a/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc index f0e1ce28a12..9d06ea28d55 100644 --- a/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc +++ 
b/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc @@ -1,4 +1,114 @@ [[analysis-whitespace-tokenizer]] -=== Whitespace Tokenizer +=== Whitespace Analyzer -A tokenizer of type `whitespace` that divides text at whitespace. +The `whitespace` tokenizer breaks text into terms whenever it encounters a +whitespace character. + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "whitespace", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "Brown-Foxes", + "start_offset": 12, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "word", + "position": 8 + }, + { + "token": "bone.", + "start_offset": 51, + "end_offset": 56, + "type": "word", + "position": 9 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown-Foxes, jumped, over, the, lazy, dog's, bone. ] +--------------------------- + +[float] +=== Configuration + +The `whitespace` tokenizer is not configurable. From c257e2c51f235853c4453a86e10e463813140fc9 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 19 May 2016 14:08:08 -0400 Subject: [PATCH 35/36] Remove settings and system properties entanglement Today when parsing settings during bootstrap, we add a system property for every Elasticsearch setting. Additionally, settings can be set via system properties. This commit simplifies this situation. - settings are no longer propogated to system properties - system properties can not be used to set settings - the "es." 
prefix on settings is no longer required (nor permitted) - test logging has a dedicated system property (tests.logger.level) Relates #18198 --- TESTING.asciidoc | 2 +- .../elasticsearch/gradle/BuildPlugin.groovy | 2 +- .../elasticsearch/gradle/test/NodeInfo.groovy | 4 +- .../elasticsearch/bootstrap/Bootstrap.java | 27 +-- .../bootstrap/BootstrapInfo.java | 4 + .../bootstrap/Elasticsearch.java | 27 +-- .../java/org/elasticsearch/cli/Command.java | 7 +- .../org/elasticsearch/cli/SettingCommand.java | 77 +++++++ .../common/logging/LogConfigurator.java | 6 +- .../common/settings/ClusterSettings.java | 1 - .../common/settings/Settings.java | 62 ++---- .../internal/InternalSettingsPreparer.java | 48 +++-- .../plugins/InstallPluginCommand.java | 26 +-- .../plugins/ListPluginsCommand.java | 23 ++- .../org/elasticsearch/plugins/PluginCli.java | 21 +- .../plugins/RemovePluginCommand.java | 18 +- .../elasticsearch/bootstrap/security.policy | 2 +- .../bootstrap/ElasticsearchCliTests.java | 72 +------ .../client/transport/TransportClientIT.java | 1 - .../transport/TransportClientRetryIT.java | 1 - .../common/settings/ScopedSettingsTests.java | 9 +- .../common/settings/SettingsTests.java | 41 ++-- .../InternalSettingsPreparerTests.java | 12 +- .../common/logging/config/logging.yml | 7 +- dev-tools/smoke_test_rc.py | 2 +- .../src/main/packaging/init.d/elasticsearch | 2 +- .../src/main/packaging/init.d/elasticsearch | 2 +- .../packaging/systemd/elasticsearch.service | 6 +- .../main/resources/bin/elasticsearch-plugin | 6 +- .../src/main/resources/config/logging.yml | 7 +- docs/plugins/plugin-script.asciidoc | 2 +- docs/reference/getting-started.asciidoc | 2 +- .../allocation/filtering.asciidoc | 2 +- .../migration/migrate_5_0/packaging.asciidoc | 12 ++ .../migration/migrate_5_0/settings.asciidoc | 11 +- .../cluster/allocation_awareness.asciidoc | 2 +- docs/reference/modules/node.asciidoc | 2 +- docs/reference/setup/configuration.asciidoc | 9 +- docs/reference/setup/install/windows.asciidoc | 7 +- .../setup/install/zip-targz.asciidoc | 2 +- .../bootstrap/EvilElasticsearchCliTests.java | 62 ++++++ .../plugins/InstallPluginCommandTests.java | 195 ++++++++---------- .../plugins/ListPluginsCommandTests.java | 58 +++--- .../plugins/RemovePluginCommandTests.java | 42 ++-- .../elasticsearch/tribe/TribeUnitTests.java | 19 -- .../smoketest/ESSmokeClientTestCase.java | 1 - .../scripts/module_and_plugin_test_cases.bash | 2 +- .../scripts/packaging_test_utils.bash | 2 +- .../bootstrap/ESElasticsearchCliTestCase.java | 65 ++++++ .../test/ESSingleNodeTestCase.java | 1 - .../org/elasticsearch/test/ExternalNode.java | 1 - .../test/ExternalTestCluster.java | 1 - .../test/InternalTestCluster.java | 16 +- .../junit/listeners/ReproduceInfoPrinter.java | 4 +- .../src/main/resources/log4j.properties | 4 +- 55 files changed, 554 insertions(+), 495 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/cli/SettingCommand.java create mode 100644 qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java create mode 100644 test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 5eea0b8c163..af46c2e567b 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -201,7 +201,7 @@ gradle test -Dtests.timeoutSuite=5000! ... 
Change the logging level of ES (not gradle) -------------------------------- -gradle test -Des.logger.level=DEBUG +gradle test -Dtests.logger.level=DEBUG -------------------------------- Print all the logging output from the test runs to the commandline diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 029c80b6e25..3207aa73613 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -456,7 +456,7 @@ class BuildPlugin implements Plugin { // default test sysprop values systemProperty 'tests.ifNoTests', 'fail' // TODO: remove setting logging level via system property - systemProperty 'es.logger.level', 'WARN' + systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('tests.') || property.getKey().startsWith('es.')) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 2ff5e333139..014686e0207 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -129,7 +129,7 @@ class NodeInfo { } env = [ 'JAVA_HOME' : project.javaHome ] - args.addAll("-E", "es.node.portsfile=true") + args.addAll("-E", "node.portsfile=true") String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs env.put('ES_JAVA_OPTS', esJavaOpts) @@ -140,7 +140,7 @@ class NodeInfo { } } env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options')) - args.addAll("-E", "es.path.conf=${confDir}") + args.addAll("-E", "path.conf=${confDir}") if (Os.isFamily(Os.FAMILY_WINDOWS)) { args.add('"') // end the entire command, quoted } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 6d35cafd088..305a4fd30ae 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -177,15 +177,7 @@ final class Bootstrap { // install SM after natives, shutdown hooks, etc. Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings)); - // We do not need to reload system properties here as we have already applied them in building the settings and - // reloading could cause multiple prompts to the user for values if a system property was specified with a prompt - // placeholder - Settings nodeSettings = Settings.builder() - .put(settings) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) - .build(); - - node = new Node(nodeSettings) { + node = new Node(settings) { @Override protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) { BootstrapCheck.check(settings, boundTransportAddress); @@ -193,13 +185,13 @@ final class Bootstrap { }; } - private static Environment initialSettings(boolean foreground, String pidFile) { + private static Environment initialSettings(boolean foreground, String pidFile, Map esSettings) { Terminal terminal = foreground ? 
Terminal.DEFAULT : null; Settings.Builder builder = Settings.builder(); if (Strings.hasLength(pidFile)) { builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile); } - return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal); + return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings); } private void start() { @@ -233,11 +225,13 @@ final class Bootstrap { // Set the system property before anything has a chance to trigger its use initLoggerPrefix(); - elasticsearchSettings(esSettings); + // force the class initializer for BootstrapInfo to run before + // the security manager is installed + BootstrapInfo.init(); INSTANCE = new Bootstrap(); - Environment environment = initialSettings(foreground, pidFile); + Environment environment = initialSettings(foreground, pidFile, esSettings); Settings settings = environment.settings(); LogConfigurator.configure(settings, true); checkForCustomConfFile(); @@ -295,13 +289,6 @@ final class Bootstrap { } } - @SuppressForbidden(reason = "Sets system properties passed as CLI parameters") - private static void elasticsearchSettings(Map esSettings) { - for (Map.Entry esSetting : esSettings.entrySet()) { - System.setProperty(esSetting.getKey(), esSetting.getValue()); - } - } - @SuppressForbidden(reason = "System#out") private static void closeSystOut() { System.out.close(); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java index bd693951eb2..791836bf8a4 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java @@ -120,4 +120,8 @@ public final class BootstrapInfo { } return SYSTEM_PROPERTIES; } + + public static void init() { + } + } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index bb1f6cc87d5..b3259129473 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -21,28 +21,25 @@ package org.elasticsearch.bootstrap; import joptsimple.OptionSet; import joptsimple.OptionSpec; -import joptsimple.util.KeyValuePair; import org.elasticsearch.Build; -import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.monitor.jvm.JvmInfo; import java.io.IOException; import java.util.Arrays; -import java.util.HashMap; import java.util.Map; /** * This class starts elasticsearch. 
*/ -class Elasticsearch extends Command { +class Elasticsearch extends SettingCommand { private final OptionSpec versionOption; private final OptionSpec daemonizeOption; private final OptionSpec pidfileOption; - private final OptionSpec propertyOption; // visible for testing Elasticsearch() { @@ -56,7 +53,6 @@ class Elasticsearch extends Command { pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"), "Creates a pid file in the specified path on start") .withRequiredArg(); - propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class); } /** @@ -75,7 +71,7 @@ class Elasticsearch extends Command { } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { if (options.nonOptionArguments().isEmpty() == false) { throw new UserError(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments()); } @@ -84,26 +80,15 @@ class Elasticsearch extends Command { throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option"); } terminal.println("Version: " + org.elasticsearch.Version.CURRENT - + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() - + ", JVM: " + JvmInfo.jvmInfo().version()); + + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + + ", JVM: " + JvmInfo.jvmInfo().version()); return; } final boolean daemonize = options.has(daemonizeOption); final String pidFile = pidfileOption.value(options); - final Map esSettings = new HashMap<>(); - for (final KeyValuePair kvp : propertyOption.values(options)) { - if (!kvp.key.startsWith("es.")) { - throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]"); - } - if (kvp.value.isEmpty()) { - throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty"); - } - esSettings.put(kvp.key, kvp.value); - } - - init(daemonize, pidFile, esSettings); + init(daemonize, pidFile, settings); } void init(final boolean daemonize, final String pidFile, final Map esSettings) { diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java index 1fc7c9fe74f..3e2faf13657 100644 --- a/core/src/main/java/org/elasticsearch/cli/Command.java +++ b/core/src/main/java/org/elasticsearch/cli/Command.java @@ -19,15 +19,15 @@ package org.elasticsearch.cli; -import java.io.IOException; -import java.util.Arrays; - import joptsimple.OptionException; import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.elasticsearch.common.SuppressForbidden; +import java.io.IOException; +import java.util.Arrays; + /** * An action to execute within a cli. */ @@ -112,4 +112,5 @@ public abstract class Command { * * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */ protected abstract void execute(Terminal terminal, OptionSet options) throws Exception; + } diff --git a/core/src/main/java/org/elasticsearch/cli/SettingCommand.java b/core/src/main/java/org/elasticsearch/cli/SettingCommand.java new file mode 100644 index 00000000000..868975ac6ff --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/SettingCommand.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import joptsimple.util.KeyValuePair; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +public abstract class SettingCommand extends Command { + + private final OptionSpec settingOption; + + public SettingCommand(String description) { + super(description); + this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class); + } + + @Override + protected void execute(Terminal terminal, OptionSet options) throws Exception { + final Map settings = new HashMap<>(); + for (final KeyValuePair kvp : settingOption.values(options)) { + if (kvp.value.isEmpty()) { + throw new UserError(ExitCodes.USAGE, "Setting [" + kvp.key + "] must not be empty"); + } + settings.put(kvp.key, kvp.value); + } + + putSystemPropertyIfSettingIsMissing(settings, "path.conf", "es.path.conf"); + putSystemPropertyIfSettingIsMissing(settings, "path.data", "es.path.data"); + putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home"); + putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs"); + + execute(terminal, options, settings); + } + + protected static void putSystemPropertyIfSettingIsMissing(final Map settings, final String setting, final String key) { + final String value = System.getProperty(key); + if (value != null) { + if (settings.containsKey(setting)) { + final String message = + String.format( + Locale.ROOT, + "duplicate setting [%s] found via command-line [%s] and system property [%s]", + setting, + settings.get(setting), + value); + throw new IllegalArgumentException(message); + } else { + settings.put(setting, value); + } + } + } + + protected abstract void execute(Terminal terminal, OptionSet options, Map settings) throws Exception; + +} diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index eba89c2e02a..b29cab9fbfe 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.logging; import org.apache.log4j.PropertyConfigurator; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -93,8 +92,7 @@ public class LogConfigurator { /** * Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders. 
- * - * @param settings custom settings that should be applied + * @param settings custom settings that should be applied * @param resolveConfig controls whether the logging conf file should be read too or not. */ public static void configure(Settings settings, boolean resolveConfig) { @@ -109,7 +107,7 @@ public class LogConfigurator { if (resolveConfig) { resolveConfig(environment, settingsBuilder); } - settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties()); + // add custom settings after config was added so that they are not overwritten by config settingsBuilder.put(settings); settingsBuilder.replacePropertyPlaceholders(); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 5b6130281d4..36ee01484e6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -375,7 +375,6 @@ public final class ClusterSettings extends AbstractScopedSettings { BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX, ClusterName.CLUSTER_NAME_SETTING, Client.CLIENT_TYPE_SETTING_S, - InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING, ThreadContext.DEFAULT_HEADERS_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 8488ca75c73..7a335aa1f32 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -58,9 +58,11 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.elasticsearch.common.unit.SizeValue.parseSizeValue; @@ -942,66 +944,27 @@ public final class Settings implements ToXContent { return this; } - /** - * Puts all the properties with keys starting with the provided prefix. - * - * @param prefix The prefix to filter property key by - * @param properties The properties to put - * @return The builder - */ - public Builder putProperties(String prefix, Dictionary properties) { - for (Object property : Collections.list(properties.keys())) { - String key = Objects.toString(property); - String value = Objects.toString(properties.get(property)); - if (key.startsWith(prefix)) { - map.put(key.substring(prefix.length()), value); + public Builder putProperties(Map esSettings, Predicate keyPredicate, Function keyFunction) { + for (final Map.Entry esSetting : esSettings.entrySet()) { + final String key = esSetting.getKey(); + if (keyPredicate.test(key)) { + map.put(keyFunction.apply(key), esSetting.getValue()); } } return this; } /** - * Puts all the properties with keys starting with the provided prefix. 
- * - * @param prefix The prefix to filter property key by - * @param properties The properties to put - * @return The builder - */ - public Builder putProperties(String prefix, Dictionary properties, String ignorePrefix) { - for (Object property : Collections.list(properties.keys())) { - String key = Objects.toString(property); - String value = Objects.toString(properties.get(property)); - if (key.startsWith(prefix)) { - if (!key.startsWith(ignorePrefix)) { - map.put(key.substring(prefix.length()), value); - } - } - } - return this; - } - - /** - * Runs across all the settings set on this builder and replaces ${...} elements in the - * each setting value according to the following logic: - *
<p>
- * First, tries to resolve it against a System property ({@link System#getProperty(String)}), next, - * tries and resolve it against an environment variable ({@link System#getenv(String)}), and last, tries - * and replace it with another setting already set on this builder. + * Runs across all the settings set on this builder and + * replaces ${...} elements in each setting with + * another setting already set on this builder. */ public Builder replacePropertyPlaceholders() { PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() { @Override public String resolvePlaceholder(String placeholderName) { - if (placeholderName.startsWith("env.")) { - // explicit env var prefix - return System.getenv(placeholderName.substring("env.".length())); - } - String value = System.getProperty(placeholderName); - if (value != null) { - return value; - } - value = System.getenv(placeholderName); + final String value = System.getenv(placeholderName); if (value != null) { return value; } @@ -1010,8 +973,7 @@ public final class Settings implements ToXContent { @Override public boolean shouldIgnoreMissing(String placeholderName) { - // if its an explicit env var, we are ok with not having a value for it and treat it as optional - if (placeholderName.startsWith("env.") || placeholderName.startsWith("prompt.")) { + if (placeholderName.startsWith("prompt.")) { return true; } return false; diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 124c8b2dbdd..24a9cf589bd 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -19,14 +19,11 @@ package org.elasticsearch.node.internal; -import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; -import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -39,10 +36,13 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; import static org.elasticsearch.common.Strings.cleanPath; @@ -52,20 +52,18 @@ import static org.elasticsearch.common.Strings.cleanPath; public class InternalSettingsPreparer { private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"}; - static final String PROPERTY_PREFIX = "es."; - static final String PROPERTY_DEFAULTS_PREFIX = "es.default."; + static final String PROPERTY_DEFAULTS_PREFIX = "default."; + static final Predicate PROPERTY_DEFAULTS_PREDICATE = key -> key.startsWith(PROPERTY_DEFAULTS_PREFIX); public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = 
"${prompt.text}"; - public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = - Setting.boolSetting("config.ignore_system_properties", false, Property.NodeScope); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults. */ public static Settings prepareSettings(Settings input) { Settings.Builder output = Settings.builder(); - initializeSettings(output, input, true); + initializeSettings(output, input, true, Collections.emptyMap()); finalizeSettings(output, null, null); return output.build(); } @@ -80,9 +78,23 @@ public class InternalSettingsPreparer { * @return the {@link Settings} and {@link Environment} as a {@link Tuple} */ public static Environment prepareEnvironment(Settings input, Terminal terminal) { + return prepareEnvironment(input, terminal, Collections.emptyMap()); + } + + /** + * Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings, + * and then replacing all property placeholders. If a {@link Terminal} is provided and configuration settings are loaded, + * settings with a value of ${prompt.text} or ${prompt.secret} will result in a prompt for + * the setting to the user. + * @param input The custom settings to use. These are not overwritten by settings in the configuration file. + * @param terminal the Terminal to use for input/output + * @param properties Map of properties key/value pairs (usually from the command-line) + * @return the {@link Settings} and {@link Environment} as a {@link Tuple} + */ + public static Environment prepareEnvironment(Settings input, Terminal terminal, Map properties) { // just create enough settings to build the environment, to get the config dir Settings.Builder output = Settings.builder(); - initializeSettings(output, input, true); + initializeSettings(output, input, true, properties); Environment environment = new Environment(output.build()); boolean settingsFileFound = false; @@ -103,7 +115,7 @@ public class InternalSettingsPreparer { // re-initialize settings now that the config file has been loaded // TODO: only re-initialize if a config file was actually loaded - initializeSettings(output, input, false); + initializeSettings(output, input, false, properties); finalizeSettings(output, terminal, environment.configFile()); environment = new Environment(output.build()); @@ -113,22 +125,16 @@ public class InternalSettingsPreparer { return new Environment(output.build()); } - private static boolean useSystemProperties(Settings input) { - return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input); - } - /** * Initializes the builder with the given input settings, and loads system properties settings if allowed. * If loadDefaults is true, system property default settings are loaded. 
*/ - private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults) { + private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults, Map esSettings) { output.put(input); - if (useSystemProperties(input)) { - if (loadDefaults) { - output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties()); - } - output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX); + if (loadDefaults) { + output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE, key -> key.substring(PROPERTY_DEFAULTS_PREFIX.length())); } + output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE.negate(), Function.identity()); output.replacePropertyPlaceholders(); } diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index e81d376f3b5..645d07bfb64 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -27,11 +27,14 @@ import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import java.io.BufferedReader; import java.io.IOException; @@ -56,6 +59,7 @@ import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.zip.ZipEntry; @@ -95,7 +99,7 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; * elasticsearch config directory, using the name of the plugin. If any files to be installed * already exist, they will be skipped. 
*/ -class InstallPluginCommand extends Command { +class InstallPluginCommand extends SettingCommand { private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging"; @@ -132,7 +136,6 @@ class InstallPluginCommand extends Command { "store-smb", "x-pack"))); - private final Environment env; private final OptionSpec batchOption; private final OptionSpec arguments; @@ -160,9 +163,8 @@ class InstallPluginCommand extends Command { FILE_PERMS = Collections.unmodifiableSet(filePerms); } - InstallPluginCommand(Environment env) { + InstallPluginCommand() { super("Install a plugin"); - this.env = env; this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), "Enable batch mode explicitly, automatic confirmation of security permission"); this.arguments = parser.nonOptions("plugin id"); @@ -178,7 +180,7 @@ class InstallPluginCommand extends Command { } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { @@ -186,12 +188,12 @@ class InstallPluginCommand extends Command { } String pluginId = args.get(0); boolean isBatch = options.has(batchOption) || System.console() == null; - execute(terminal, pluginId, isBatch); + execute(terminal, pluginId, isBatch, settings); } // pkg private for testing - void execute(Terminal terminal, String pluginId, boolean isBatch) throws Exception { - + void execute(Terminal terminal, String pluginId, boolean isBatch, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); // TODO: remove this leniency!! is it needed anymore? if (Files.exists(env.pluginsFile()) == false) { terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. Creating..."); @@ -200,7 +202,7 @@ class InstallPluginCommand extends Command { Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); - install(terminal, isBatch, extractedZip); + install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to. */ @@ -349,7 +351,7 @@ class InstallPluginCommand extends Command { } /** Load information about the plugin, and verify it can be installed with no errors. */ - private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception { + private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception { // read and validate the plugin descriptor PluginInfo info = PluginInfo.readFromProperties(pluginRoot); terminal.println(VERBOSE, info.toString()); @@ -398,12 +400,12 @@ class InstallPluginCommand extends Command { * Installs the plugin from {@code tmpRoot} into the plugins dir. * If the plugin has a bin dir and/or a config dir, those are copied. 
*/ - private void install(Terminal terminal, boolean isBatch, Path tmpRoot) throws Exception { + private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env) throws Exception { List deleteOnFailure = new ArrayList<>(); deleteOnFailure.add(tmpRoot); try { - PluginInfo info = verify(terminal, tmpRoot, isBatch); + PluginInfo info = verify(terminal, tmpRoot, isBatch, env); final Path destination = env.pluginsFile().resolve(info.getName()); if (Files.exists(destination)) { diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index c03e70ad4da..bd2f853bac0 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -19,6 +19,13 @@ package org.elasticsearch.plugins; +import joptsimple.OptionSet; +import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; + import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; @@ -26,26 +33,20 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.List; - -import joptsimple.OptionSet; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.env.Environment; +import java.util.Map; /** * A command for the plugin cli to list plugins installed in elasticsearch. */ -class ListPluginsCommand extends Command { +class ListPluginsCommand extends SettingCommand { - private final Environment env; - - ListPluginsCommand(Environment env) { + ListPluginsCommand() { super("Lists installed elasticsearch plugins"); - this.env = env; } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); if (Files.exists(env.pluginsFile()) == false) { throw new IOException("Plugins directory missing: " + env.pluginsFile()); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java index be06ea7db1c..3a88c4d0083 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -26,21 +26,24 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import java.util.Collections; + /** * A cli tool for adding, removing and listing plugins for elasticsearch. 
*/ public class PluginCli extends MultiCommand { - public PluginCli(Environment env) { + public PluginCli() { super("A tool for managing installed elasticsearch plugins"); - subcommands.put("list", new ListPluginsCommand(env)); - subcommands.put("install", new InstallPluginCommand(env)); - subcommands.put("remove", new RemovePluginCommand(env)); + subcommands.put("list", new ListPluginsCommand()); + subcommands.put("install", new InstallPluginCommand()); + subcommands.put("remove", new RemovePluginCommand()); } public static void main(String[] args) throws Exception { // initialize default for es.logger.level because we will not read the logging.yml String loggerLevel = System.getProperty("es.logger.level", "INFO"); + String pathHome = System.getProperty("es.path.home"); // Set the appender for all potential log files to terminal so that other components that use the logger print out the // same terminal. // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is @@ -48,12 +51,14 @@ public class PluginCli extends MultiCommand { // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs. // Therefore we print to Terminal. Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder() + .put("path.home", pathHome) .put("appender.terminal.type", "terminal") - .put("rootLogger", "${es.logger.level}, terminal") - .put("es.logger.level", loggerLevel) + .put("rootLogger", "${logger.level}, terminal") + .put("logger.level", loggerLevel) .build(), Terminal.DEFAULT); LogConfigurator.configure(loggingEnvironment.settings(), false); - Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT); - exit(new PluginCli(env).main(args, Terminal.DEFAULT)); + + exit(new PluginCli().main(args, Terminal.DEFAULT)); } + } diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index a3e6c375f83..af48c1d8207 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -24,45 +24,49 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; +import java.util.Map; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; /** * A command for the plugin cli to remove a plugin from elasticsearch. 
*/ -class RemovePluginCommand extends Command { +class RemovePluginCommand extends SettingCommand { - private final Environment env; private final OptionSpec arguments; - RemovePluginCommand(Environment env) { + RemovePluginCommand() { super("Removes a plugin from elasticsearch"); - this.env = env; this.arguments = parser.nonOptions("plugin name"); } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); } - execute(terminal, args.get(0)); + execute(terminal, args.get(0), settings); } // pkg private for testing - void execute(Terminal terminal, String pluginName) throws Exception { + void execute(Terminal terminal, String pluginName, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); Path pluginDir = env.pluginsFile().resolve(pluginName); diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 74404903e91..32f7b2bf0dd 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -71,7 +71,7 @@ grant { // set by ESTestCase to improve test reproducibility // TODO: set this with gradle or some other way that repros with seed? - permission java.util.PropertyPermission "es.processors.override", "write"; + permission java.util.PropertyPermission "processors.override", "write"; // TODO: these simply trigger a noisy warning if its unable to clear the properties // fix that in randomizedtesting diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 632646146fe..8b8a4d947a9 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -22,25 +22,15 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.monitor.jvm.JvmInfo; -import org.elasticsearch.test.ESTestCase; -import org.junit.After; -import org.junit.Before; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; -public class ElasticsearchCliTests extends ESTestCase { +public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { public void testVersion() throws Exception { runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d"); @@ -96,7 +86,7 @@ public class ElasticsearchCliTests extends ESTestCase { false, output -> assertThat(output, containsString("Positional arguments not allowed, found [foo]")), 
(foreground, pidFile, esSettings) -> {}, - "-E", "something", "foo", "-E", "somethingelse" + "-E", "foo=bar", "foo", "-E", "baz=qux" ); } @@ -138,26 +128,10 @@ public class ElasticsearchCliTests extends ESTestCase { output -> {}, (foreground, pidFile, esSettings) -> { assertThat(esSettings.size(), equalTo(2)); - assertThat(esSettings, hasEntry("es.foo", "bar")); - assertThat(esSettings, hasEntry("es.baz", "qux")); + assertThat(esSettings, hasEntry("foo", "bar")); + assertThat(esSettings, hasEntry("baz", "qux")); }, - "-Ees.foo=bar", "-E", "es.baz=qux" - ); - } - - public void testElasticsearchSettingPrefix() throws Exception { - runElasticsearchSettingPrefixTest("-E", "foo"); - runElasticsearchSettingPrefixTest("-E", "foo=bar"); - runElasticsearchSettingPrefixTest("-E", "=bar"); - } - - private void runElasticsearchSettingPrefixTest(String... args) throws Exception { - runTest( - ExitCodes.USAGE, - false, - output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")), - (foreground, pidFile, esSettings) -> {}, - args + "-Efoo=bar", "-E", "baz=qux" ); } @@ -165,9 +139,9 @@ public class ElasticsearchCliTests extends ESTestCase { runTest( ExitCodes.USAGE, false, - output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")), + output -> assertThat(output, containsString("Setting [foo] must not be empty")), (foreground, pidFile, esSettings) -> {}, - "-E", "es.foo=" + "-E", "foo=" ); } @@ -180,36 +154,4 @@ public class ElasticsearchCliTests extends ESTestCase { "--network.host"); } - private interface InitConsumer { - void accept(final boolean foreground, final String pidFile, final Map esSettings); - } - - private void runTest( - final int expectedStatus, - final boolean expectedInit, - final Consumer outputConsumer, - final InitConsumer initConsumer, - String... 
args) throws Exception { - final MockTerminal terminal = new MockTerminal(); - try { - final AtomicBoolean init = new AtomicBoolean(); - final int status = Elasticsearch.main(args, new Elasticsearch() { - @Override - void init(final boolean daemonize, final String pidFile, final Map esSettings) { - init.set(true); - initConsumer.accept(!daemonize, pidFile, esSettings); - } - }, terminal); - assertThat(status, equalTo(expectedStatus)); - assertThat(init.get(), equalTo(expectedInit)); - outputConsumer.accept(terminal.getOutput()); - } catch (Throwable t) { - // if an unexpected exception is thrown, we log - // terminal output to aid debugging - logger.info(terminal.getOutput()); - // rethrow so the test fails - throw t; - } - } - } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index a20a5247ed6..9cdeef2a7ff 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -59,7 +59,6 @@ public class TransportClientIT extends ESIntegTestCase { .put("http.enabled", false) .put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .build()); node.start(); try { diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index 2fcadb51a10..4ec1f66df57 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -55,7 +55,6 @@ public class TransportClientRetryIT extends ESIntegTestCase { .put("node.name", "transport_client_retry_test") .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode()) .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), internalCluster().getClusterName()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); try (TransportClient client = TransportClient.builder().settings(builder.build()).build()) { diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index aa8614aee71..3afd60d86e4 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -302,11 +302,8 @@ public class ScopedSettingsTests extends ESTestCase { public void testLoggingUpdates() { final String level = ESLoggerFactory.getRootLogger().getLevel(); final String testLevel = ESLoggerFactory.getLogger("test").getLevel(); - String property = System.getProperty("es.logger.level"); - Settings.Builder builder = Settings.builder(); - if (property != null) { - builder.put("logger.level", property); - } + String property = randomFrom(ESLoggerFactory.LogLevel.values()).toString(); + Settings.Builder builder = Settings.builder().put("logger.level", property); try { ClusterSettings settings = new ClusterSettings(builder.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); try { @@ -319,7 +316,7 @@ public class ScopedSettingsTests extends ESTestCase { 
settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(property, ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger.test", "TRACE").build()); assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel()); settings.applySettings(Settings.builder().build()); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 3539e54d943..fe7dcda3b25 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -31,7 +31,9 @@ import java.util.Set; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -42,31 +44,32 @@ public class SettingsTests extends ESTestCase { String value = System.getProperty("java.home"); assertFalse(value.isEmpty()); Settings settings = Settings.builder() - .put("setting1", "${java.home}") + .put("property.placeholder", value) + .put("setting1", "${property.placeholder}") .replacePropertyPlaceholders() .build(); assertThat(settings.get("setting1"), equalTo(value)); - - assertNull(System.getProperty("_test_property_should_not_exist")); - settings = Settings.builder() - .put("setting1", "${_test_property_should_not_exist:defaultVal1}") - .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), equalTo("defaultVal1")); - - settings = Settings.builder() - .put("setting1", "${_test_property_should_not_exist:}") - .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), is(nullValue())); } - public void testReplacePropertiesPlaceholderIgnoreEnvUnset() { - Settings settings = Settings.builder() - .put("setting1", "${env.UNSET_ENV_VAR}") + public void testReplacePropertiesPlaceholderSystemVariablesHaveNoEffect() { + final String value = System.getProperty("java.home"); + assertNotNull(value); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Settings.builder() + .put("setting1", "${java.home}") .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), is(nullValue())); + .build()); + assertThat(e, hasToString(containsString("Could not resolve placeholder 'java.home'"))); + } + + public void testReplacePropertiesPlaceholderByEnvironmentVariables() { + final Map.Entry entry = randomSubsetOf(1, System.getenv().entrySet()).get(0); + assertNotNull(entry.getValue()); + + final Settings implicitEnvSettings = Settings.builder() + .put("setting1", "${" + entry.getKey() + "}") + .replacePropertyPlaceholders() + .build(); + assertThat(implicitEnvSettings.get("setting1"), equalTo(entry.getValue())); } public void testReplacePropertiesPlaceholderIgnoresPrompt() { diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java 
b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index e1397ca47f1..87abc20a0de 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.node.internal; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; - import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.settings.Settings; @@ -33,6 +28,11 @@ import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -134,7 +134,6 @@ public class InternalSettingsPreparerTests extends ESTestCase { Files.createDirectory(config); Files.copy(garbage, config.resolve("elasticsearch.yml")); InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("config.ignore_system_properties", true) .put(baseEnvSettings) .build(), null); } catch (SettingsException e) { @@ -153,7 +152,6 @@ public class InternalSettingsPreparerTests extends ESTestCase { try { InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("config.ignore_system_properties", true) .put(baseEnvSettings) .build(), null); } catch (SettingsException e) { diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml index 515e4320fd2..548b186e46f 100644 --- a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml +++ b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml @@ -1,6 +1,7 @@ -# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG -es.logger.level: INFO -rootLogger: ${es.logger.level}, console +# you can override using a command-line parameter +# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE) +logger.level: INFO +rootLogger: ${logger.level}, console logger: test: TRACE, console diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 39db9929a54..ac5e9afec47 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -203,7 +203,7 @@ def smoke_test_release(release, files, expected_hash, plugins): headers = {} print(' Starting elasticsearch deamon from [%s]' % es_dir) try: - run('%s; %s -Ees.node.name=smoke_tester -Ees.cluster.name=prepare_release -Ees.script.inline=true -Ees.script.stored=true -Ees.repositories.url.allowed_urls=http://snapshot.test* %s -Ees.pidfile=%s -Ees.node.portsfile=true' + run('%s; %s -Enode.name=smoke_tester -Ecluster.name=prepare_release -Escript.inline=true -Escript.stored=true -Erepositories.url.allowed_urls=http://snapshot.test* %s -Epidfile=%s -Enode.portsfile=true' % (java_exe(), es_run_path, '-d', os.path.join(es_dir, 'es-smoke.pid'))) if not wait_for_node_startup(es_dir, header=headers): print("elasticsearch logs:") diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index a01643db2b3..fdeb6db308e 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -79,7 +79,7 @@ fi # Define 
other required variables PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch -DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR" +DAEMON_OPTS="-d -p $PID_FILE -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR" export ES_JAVA_OPTS export JAVA_HOME diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index 7bcb5692a88..8f1d93dcbdc 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -114,7 +114,7 @@ start() { cd $ES_HOME echo -n $"Starting $prog: " # if not running, start it up here, usually something like "daemon $exec" - daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR + daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR retval=$? echo [ $retval -eq 0 ] && touch $lockfile diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index ccbf4650a22..0c99464c4f6 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -21,9 +21,9 @@ ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -p ${PID_DIR}/elasticsearch.pid \ - -Ees.default.path.logs=${LOG_DIR} \ - -Ees.default.path.data=${DATA_DIR} \ - -Ees.default.path.conf=${CONF_DIR} + -Edefault.path.logs=${LOG_DIR} \ + -Edefault.path.data=${DATA_DIR} \ + -Edefault.path.conf=${CONF_DIR} StandardOutput=journal StandardError=inherit diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin b/distribution/src/main/resources/bin/elasticsearch-plugin index 8a3b6676a98..06f8c5b8c27 100755 --- a/distribution/src/main/resources/bin/elasticsearch-plugin +++ b/distribution/src/main/resources/bin/elasticsearch-plugin @@ -81,10 +81,10 @@ fi HOSTNAME=`hostname | cut -d. 
-f1` export HOSTNAME -declare -a properties=(-Delasticsearch -Des.path.home="$ES_HOME") +declare -a args=("$@") if [ -e "$CONF_DIR" ]; then - properties=("${properties[@]}" -Des.default.path.conf="$CONF_DIR") + args=("${args[@]}" -Edefault.path.conf="$CONF_DIR") fi -exec "$JAVA" $ES_JAVA_OPTS "${properties[@]}" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "$@" +exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "${args[@]}" diff --git a/distribution/src/main/resources/config/logging.yml b/distribution/src/main/resources/config/logging.yml index 187e79cffa0..11cd181ebd0 100644 --- a/distribution/src/main/resources/config/logging.yml +++ b/distribution/src/main/resources/config/logging.yml @@ -1,6 +1,7 @@ -# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG -es.logger.level: INFO -rootLogger: ${es.logger.level}, console, file +# you can override using a command-line parameter +# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE) +logger.level: INFO +rootLogger: ${logger.level}, console, file logger: # log action execution errors for easier debugging action: DEBUG diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 7cb7f396608..08ad129f22f 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -135,7 +135,7 @@ can do this as follows: [source,sh] --------------------- -sudo bin/elasticsearch-plugin -Ees.path.conf=/path/to/custom/config/dir install +sudo bin/elasticsearch-plugin -Epath.conf=/path/to/custom/config/dir install --------------------- You can also set the `CONF_DIR` environment variable to the custom config diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 977cb4e5a1d..132b287bd46 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -163,7 +163,7 @@ As mentioned previously, we can override either the cluster or node name. This c [source,sh] -------------------------------------------------- -./elasticsearch -Ees.cluster.name=my_cluster_name -Ees.node.name=my_node_name +./elasticsearch -Ecluster.name=my_cluster_name -Enode.name=my_node_name -------------------------------------------------- Also note the line marked http with information about the HTTP address (`192.168.8.112`) and port (`9200`) that our node is reachable from. By default, Elasticsearch uses port `9200` to provide access to its REST API. This port is configurable if necessary. diff --git a/docs/reference/index-modules/allocation/filtering.asciidoc b/docs/reference/index-modules/allocation/filtering.asciidoc index be45cd2a1ac..05007b46188 100644 --- a/docs/reference/index-modules/allocation/filtering.asciidoc +++ b/docs/reference/index-modules/allocation/filtering.asciidoc @@ -14,7 +14,7 @@ attribute as follows: [source,sh] ------------------------ -bin/elasticsearch -Ees.node.attr.rack=rack1 -Ees.node.attr.size=big <1> +bin/elasticsearch -Enode.attr.rack=rack1 -Enode.attr.size=big <1> ------------------------ <1> These attribute settings can also be specified in the `elasticsearch.yml` config file. 
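
For readers following along, an illustrative sketch (not part of the patch; the attribute names are simply the examples from the docs above) of the equivalence the callout mentions, i.e. that the same settings can go on the command line or into the config file:

[source,sh]
--------------------------------------------------
# passing node attributes on the command line with the new -E syntax ...
bin/elasticsearch -Enode.attr.rack=rack1 -Enode.attr.size=big

# ... is equivalent to declaring them in config/elasticsearch.yml:
#   node.attr.rack: rack1
#   node.attr.size: big
--------------------------------------------------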
diff --git a/docs/reference/migration/migrate_5_0/packaging.asciidoc b/docs/reference/migration/migrate_5_0/packaging.asciidoc index 5911b964b6b..977e20a76b1 100644 --- a/docs/reference/migration/migrate_5_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_5_0/packaging.asciidoc @@ -43,3 +43,15 @@ Previously, the scripts used to start Elasticsearch and run plugin commands only required a Bourne-compatible shell. Starting in Elasticsearch 5.0.0, the bash shell is now required and `/bin/bash` is a hard-dependency for the RPM and Debian packages. + +==== Environmental Settings + +Previously, Elasticsearch could be configured via environment variables +in two ways: first by using the placeholder syntax +`${env.ENV_VAR_NAME}` and the second by using the same syntax without +the `env` prefix: `${ENV_VAR_NAME}`. The first method has been removed +from Elasticsearch. + +Additionally, it was previously possible to set any setting in +Elasticsearch via JVM system properties. This has been removed from +Elasticsearch. diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 85895d65b67..0fa7d42e874 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -202,19 +202,14 @@ the cache implementation used for the request cache and the field data cache. ==== Using system properties to configure Elasticsearch -Elasticsearch can be configured by setting system properties on the -command line via `-Des.name.of.property=value.of.property`. This will be -removed in a future version of Elasticsearch. Instead, use -`-E es.name.of.setting=value.of.setting`. Note that in all cases the -name of the setting must be prefixed with `es.`. +Elasticsearch can no longer be configured by setting system properties. +Instead, use `-Ename.of.setting=value.of.setting`. ==== Removed using double-dashes to configure Elasticsearch Elasticsearch could previously be configured on the command line by setting settings via `--name.of.setting value.of.setting`. This feature -has been removed. Instead, use -`-Ees.name.of.setting=value.of.setting`. Note that in all cases the -name of the setting must be prefixed with `es.`. +has been removed. Instead, use `-Ename.of.setting=value.of.setting`. ==== Discovery Settings diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index 383252e23b3..f4e61fb0da1 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -21,7 +21,7 @@ attribute called `rack_id` -- we could use any attribute name. For example: [source,sh] ---------------------- -./bin/elasticsearch -Ees.node.attr.rack_id=rack_one <1> +./bin/elasticsearch -Enode.attr.rack_id=rack_one <1> ---------------------- <1> This setting could also be specified in the `elasticsearch.yml` config file. 
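
As a quick before/after sketch of the command-line changes described in the migration notes above (the setting names are illustrative only and are not part of the patch):

[source,sh]
--------------------------------------------------
# Elasticsearch 2.x: system properties and double-dash arguments (both now removed)
./bin/elasticsearch -Des.cluster.name=my_cluster --node.name node_1

# Elasticsearch 5.0: plain -E settings
./bin/elasticsearch -Ecluster.name=my_cluster -Enode.name=node_1
--------------------------------------------------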
diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 124d68f1d6d..2f1caa42ad8 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -265,7 +265,7 @@ Like all node settings, it can also be specified on the command line as: [source,sh] ----------------------- -./bin/elasticsearch -Ees.path.data=/var/elasticsearch/data +./bin/elasticsearch -Epath.data=/var/elasticsearch/data ----------------------- TIP: When using the `.zip` or `.tar.gz` distributions, the `path.data` setting diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index ceb3d8c38d3..68f73fc96b8 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -26,7 +26,7 @@ setting, as follows: [source,sh] ------------------------------- -./bin/elasticsearch -E es.path.conf=/path/to/my/config/ +./bin/elasticsearch -Epath.conf=/path/to/my/config/ ------------------------------- [float] @@ -93,15 +93,14 @@ is used in the settings and the process is run as a service or in the background === Setting default settings New default settings may be specified on the command line using the -`es.default.` prefix instead of the `es.` prefix. This will specify a value -that will be used by default unless another value is specified in the config -file. +`default.` prefix. This will specify a value that will be used by +default unless another value is specified in the config file. For instance, if Elasticsearch is started as follows: [source,sh] --------------------------- -./bin/elasticsearch -E es.default.node.name=My_Node +./bin/elasticsearch -Edefault.node.name=My_Node --------------------------- the value for `node.name` will be `My_Node`, unless it is overwritten on the diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index e3c7622d064..0d2e8bf04f6 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -45,15 +45,14 @@ file by default. The format of this config file is explained in <>. Any settings that can be specified in the config file can also be specified on -the command line, using the `-E` syntax, and prepending `es.` to the setting -name, as follows: +the command line, using the `-E` syntax as follows: [source,sh] -------------------------------------------- -./bin/elasticsearch -E es.cluster.name=my_cluster -E es.node.name=node_1 +./bin/elasticsearch -Ecluster.name=my_cluster -Enode.name=node_1 -------------------------------------------- -NOTE: Values that contain spaces must be surrounded with quotes. For instance `-E es.path.logs="C:\My Logs\logs"`. +NOTE: Values that contain spaces must be surrounded with quotes. For instance `-Epath.logs="C:\My Logs\logs"`. 
TIP: Typically, any cluster-wide settings (like `cluster.name`) should be added to the `elasticsearch.yml` config file, while any node-specific settings diff --git a/docs/reference/setup/install/zip-targz.asciidoc b/docs/reference/setup/install/zip-targz.asciidoc index 0ed67cb9ce9..7fc41a0f3f8 100644 --- a/docs/reference/setup/install/zip-targz.asciidoc +++ b/docs/reference/setup/install/zip-targz.asciidoc @@ -93,7 +93,7 @@ name, as follows: [source,sh] -------------------------------------------- -./bin/elasticsearch -d -E es.cluster.name=my_cluster -E es.node.name=node_1 +./bin/elasticsearch -d -Ecluster.name=my_cluster -Enode.name=node_1 -------------------------------------------- TIP: Typically, any cluster-wide settings (like `cluster.name`) should be diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java new file mode 100644 index 00000000000..8bd2451da57 --- /dev/null +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { + + @SuppressForbidden(reason = "manipulates system properties for testing") + public void testPathHome() throws Exception { + final String pathHome = System.getProperty("es.path.home"); + final String value = randomAsciiOfLength(16); + System.setProperty("es.path.home", value); + + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(1)); + assertThat(esSettings, hasEntry("path.home", value)); + }); + + System.clearProperty("es.path.home"); + final String commandLineValue = randomAsciiOfLength(16); + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(1)); + assertThat(esSettings, hasEntry("path.home", commandLineValue)); + }, + "-Epath.home=" + commandLineValue); + + if (pathHome != null) System.setProperty("es.path.home", pathHome); + else System.clearProperty("es.path.home"); + } + +} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index af36d96f442..22b2ef39a88 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.SuppressForbidden; import org.elasticsearch.Version; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.UserError; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.settings.Settings; @@ -54,8 +55,10 @@ import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; @@ -129,7 +132,7 @@ public class InstallPluginCommandTests extends ESTestCase { } /** Creates a test environment with bin, config and plugins directories. 
*/ - static Environment createEnv(FileSystem fs, Function temp) throws IOException { + static Tuple createEnv(FileSystem fs, Function temp) throws IOException { Path home = temp.apply("install-plugin-command-tests"); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); @@ -140,7 +143,7 @@ public class InstallPluginCommandTests extends ESTestCase { Settings settings = Settings.builder() .put("path.home", home) .build(); - return new Environment(settings); + return Tuple.tuple(home, new Environment(settings)); } static Path createPluginDir(Function temp) throws IOException { @@ -185,20 +188,22 @@ public class InstallPluginCommandTests extends ESTestCase { return writeZip(structure, "elasticsearch"); } - static MockTerminal installPlugin(String pluginUrl, Environment env) throws Exception { - return installPlugin(pluginUrl, env, false); + static MockTerminal installPlugin(String pluginUrl, Path home) throws Exception { + return installPlugin(pluginUrl, home, false); } - static MockTerminal installPlugin(String pluginUrl, Environment env, boolean jarHellCheck) throws Exception { + static MockTerminal installPlugin(String pluginUrl, Path home, boolean jarHellCheck) throws Exception { + Map settings = new HashMap<>(); + settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); - new InstallPluginCommand(env) { + new InstallPluginCommand() { @Override void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { if (jarHellCheck) { super.jarHellCheck(candidate, pluginsDir); } } - }.execute(terminal, pluginUrl, true); + }.execute(terminal, pluginUrl, true, settings); return terminal; } @@ -275,192 +280,176 @@ public class InstallPluginCommandTests extends ESTestCase { } public void testSomethingWorks() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testSpaceInUrl() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); Path pluginZipWithSpaces = createTempFile("foo bar", ".zip"); try (InputStream in = new URL(pluginZip).openStream()) { Files.copy(in, pluginZipWithSpaces, StandardCopyOption.REPLACE_EXISTING); } - installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testMalformedUrlNotMaven() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); // has two colons, so it appears similar to maven coordinates - MalformedURLException e = expectThrows(MalformedURLException.class, () -> { - installPlugin("://host:1234", env); - }); + MalformedURLException e = expectThrows(MalformedURLException.class, () -> installPlugin("://host:1234", env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("no protocol")); } public void testPluginsDirMissing() throws Exception { - Environment env = createEnv(fs, temp); - Files.delete(env.pluginsFile()); + Tuple env = createEnv(fs, temp); + Files.delete(env.v2().pluginsFile()); Path pluginDir = 
createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); - try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.pluginsFile())) { + try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { pluginsAttrs.setPermissions(new HashSet<>()); String pluginZip = createPlugin("fake", pluginDir); - IOException e = expectThrows(IOException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.pluginsFile().toString())); + IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString())); } - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBuiltinModule() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("lang-groovy", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("is a system module")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testJarHell() throws Exception { // jar hell test needs a real filesystem assumeTrue("real filesystem", isReal); - Environment environment = createEnv(fs, temp); + Tuple environment = createEnv(fs, temp); Path pluginDirectory = createPluginDir(temp); writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin"); String pluginZip = createPlugin("fake", pluginDirectory); // adds plugin.jar with FakePlugin - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - installPlugin(pluginZip, environment, true); - }); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, environment.v1(), true)); assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); - assertInstallCleaned(environment); + assertInstallCleaned(environment.v2()); } public void testIsolatedPlugins() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); // these both share the same FakePlugin class Path pluginDir1 = createPluginDir(temp); String pluginZip1 = createPlugin("fake1", pluginDir1); - installPlugin(pluginZip1, env); + installPlugin(pluginZip1, env.v1()); Path pluginDir2 = createPluginDir(temp); String pluginZip2 = createPlugin("fake2", pluginDir2); - installPlugin(pluginZip2, env); - assertPlugin("fake1", pluginDir1, env); - assertPlugin("fake2", pluginDir2, env); + installPlugin(pluginZip2, env.v1()); + assertPlugin("fake1", pluginDir1, env.v2()); + assertPlugin("fake2", pluginDir2, env.v2()); } public void testExistingPlugin() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - UserError e = expectThrows(UserError.class, 
() -> { - installPlugin(pluginZip, env); - }); + installPlugin(pluginZip, env.v1()); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBin() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testBinNotDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBinContainsDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInBinDir = pluginDir.resolve("bin").resolve("foo"); Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBinConflict() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("elasticsearch", pluginDir); - FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.binFile().resolve("elasticsearch").toString())); - assertInstallCleaned(env); + FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().binFile().resolve("elasticsearch").toString())); + assertInstallCleaned(env.v2()); } public void testBinPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.binFile())) { + try (PosixPermissionsResetter binAttrs = new 
PosixPermissionsResetter(env.v2().binFile())) { Set perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); binAttrs.setPermissions(perms); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } } public void testConfig() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("custom.yaml")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testExistingConfig() throws Exception { - Environment env = createEnv(fs, temp); - Path envConfigDir = env.configFile().resolve("fake"); + Tuple env = createEnv(fs, temp); + Path envConfigDir = env.v2().configFile().resolve("fake"); Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yaml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path pluginDir = createPluginDir(temp); @@ -469,8 +458,8 @@ public class InstallPluginCommandTests extends ESTestCase { Files.write(configDir.resolve("custom.yaml"), "new config".getBytes(StandardCharsets.UTF_8)); Files.createFile(configDir.resolve("other.yaml")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); List configLines = Files.readAllLines(envConfigDir.resolve("custom.yaml"), StandardCharsets.UTF_8); assertEquals(1, configLines.size()); assertEquals("existing config", configLines.get(0)); @@ -478,80 +467,68 @@ public class InstallPluginCommandTests extends ESTestCase { } public void testConfigNotDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testConfigContainsDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInConfigDir = pluginDir.resolve("config").resolve("foo"); Files.createDirectories(dirInConfigDir); Files.createFile(dirInConfigDir.resolve("myconfig.yml")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testConfigConflict() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path 
pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("myconfig.yml")); String pluginZip = createPlugin("elasticsearch.yml", pluginDir); - FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.configFile().resolve("elasticsearch.yml").toString())); - assertInstallCleaned(env); + FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().configFile().resolve("elasticsearch.yml").toString())); + assertInstallCleaned(env.v2()); } public void testMissingDescriptor() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve("fake.yml")); String pluginZip = writeZip(pluginDir, "elasticsearch"); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> { - installPlugin(pluginZip, env); - }); + NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testMissingDirectory() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES)); String pluginZip = writeZip(pluginDir, null); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("`elasticsearch` directory is missing in the plugin zip")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testZipRelativeOutsideEntryName() throws Exception { - Environment env = createEnv(fs, temp); + Tuple env = createEnv(fs, temp); Path zip = createTempDir().resolve("broken.zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { stream.putNextEntry(new ZipEntry("elasticsearch/../blah")); } String pluginZip = zip.toUri().toURL().toString(); - IOException e = expectThrows(IOException.class, () -> { - installPlugin(pluginZip, env); - }); + IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory")); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index f26857e19af..1422280165c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -25,35 +25,47 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.stream.Collectors; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.MockTerminal; +import 
org.elasticsearch.common.inject.spi.HasDependencies; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.Version; +import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") public class ListPluginsCommandTests extends ESTestCase { - Environment createEnv() throws IOException { - Path home = createTempDir(); + private Path home; + private Environment env; + + @Before + public void setUp() throws Exception { + super.setUp(); + home = createTempDir(); Files.createDirectories(home.resolve("plugins")); Settings settings = Settings.builder() - .put("path.home", home) - .build(); - return new Environment(settings); + .put("path.home", home) + .build(); + env = new Environment(settings); } - static MockTerminal listPlugins(Environment env) throws Exception { - return listPlugins(env, new String[0]); + static MockTerminal listPlugins(Path home) throws Exception { + return listPlugins(home, new String[0]); } - static MockTerminal listPlugins(Environment env, String[] args) throws Exception { + static MockTerminal listPlugins(Path home, String[] args) throws Exception { + String[] argsAndHome = new String[args.length + 1]; + System.arraycopy(args, 0, argsAndHome, 0, args.length); + argsAndHome[args.length] = "-Epath.home=" + home; MockTerminal terminal = new MockTerminal(); - int status = new ListPluginsCommand(env).main(args, terminal); + int status = new ListPluginsCommand().main(argsAndHome, terminal); assertEquals(ExitCodes.OK, status); return terminal; } @@ -74,49 +86,42 @@ public class ListPluginsCommandTests extends ESTestCase { public void testPluginsDirMissing() throws Exception { - Environment env = createEnv(); Files.delete(env.pluginsFile()); - IOException e = expectThrows(IOException.class, () -> { - listPlugins(env); - }); + IOException e = expectThrows(IOException.class, () -> listPlugins(home)); assertEquals(e.getMessage(), "Plugins directory missing: " + env.pluginsFile()); } public void testNoPlugins() throws Exception { - MockTerminal terminal = listPlugins(createEnv()); + MockTerminal terminal = listPlugins(home); assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty()); } public void testOnePlugin() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake", "org.fake"); - MockTerminal terminal = listPlugins(env); + MockTerminal terminal = listPlugins(home); assertEquals(terminal.getOutput(), buildMultiline("fake")); } public void testTwoPlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake2", "org.fake"); - MockTerminal terminal = listPlugins(env); + MockTerminal terminal = listPlugins(home); assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2")); } public void testPluginWithVerbose() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake"); String[] params = { "-v" }; - MockTerminal terminal = listPlugins(env, params); + MockTerminal terminal = listPlugins(home, params); assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin", "- Plugin information:", "Name: fake_plugin", "Description: fake desc", "Version: 1.0", " * Classname: org.fake")); } public void testPluginWithVerboseMultiplePlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc 1", "fake_plugin1", 
"org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); String[] params = { "-v" }; - MockTerminal terminal = listPlugins(env, params); + MockTerminal terminal = listPlugins(home, params); assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", "Description: fake desc 1", "Version: 1.0", " * Classname: org.fake", "fake_plugin2", "- Plugin information:", "Name: fake_plugin2", @@ -124,26 +129,23 @@ public class ListPluginsCommandTests extends ESTestCase { } public void testPluginWithoutVerboseMultiplePlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); - MockTerminal terminal = listPlugins(env, new String[0]); + MockTerminal terminal = listPlugins(home, new String[0]); String output = terminal.getOutput(); assertEquals(output, buildMultiline("fake_plugin1", "fake_plugin2")); } public void testPluginWithoutDescriptorFile() throws Exception{ - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("fake1")); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(env)); + NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(home)); assertEquals(e.getFile(), env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString()); } public void testPluginWithWrongDescriptorFile() throws Exception{ - Environment env = createEnv(); PluginTestUtil.writeProperties(env.pluginsFile().resolve("fake1"), "description", "fake desc"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(env)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(home)); assertEquals(e.getMessage(), "Property [name] is missing in [" + env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString() + "]"); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index d9d5661b834..6528bbc0911 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -23,6 +23,8 @@ import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.UserError; @@ -30,25 +32,32 @@ import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") public class RemovePluginCommandTests extends ESTestCase { - /** Creates a test environment with bin, config and plugins directories. 
*/ - static Environment createEnv() throws IOException { - Path home = createTempDir(); + private Path home; + private Environment env; + + @Before + public void setUp() throws Exception { + super.setUp(); + home = createTempDir(); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); Files.createDirectories(home.resolve("plugins")); Settings settings = Settings.builder() - .put("path.home", home) - .build(); - return new Environment(settings); + .put("path.home", home) + .build(); + env = new Environment(settings); } - static MockTerminal removePlugin(String name, Environment env) throws Exception { + static MockTerminal removePlugin(String name, Path home) throws Exception { + Map settings = new HashMap<>(); + settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); - new RemovePluginCommand(env).execute(terminal, name); + new RemovePluginCommand().execute(terminal, name, settings); return terminal; } @@ -63,33 +72,28 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testMissing() throws Exception { - Environment env = createEnv(); - UserError e = expectThrows(UserError.class, () -> { - removePlugin("dne", env); - }); + UserError e = expectThrows(UserError.class, () -> removePlugin("dne", home)); assertTrue(e.getMessage(), e.getMessage().contains("Plugin dne not found")); assertRemoveCleaned(env); } public void testBasic() throws Exception { - Environment env = createEnv(); Files.createDirectory(env.pluginsFile().resolve("fake")); Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); Files.createDirectory(env.pluginsFile().resolve("other")); - removePlugin("fake", env); + removePlugin("fake", home); assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); assertTrue(Files.exists(env.pluginsFile().resolve("other"))); assertRemoveCleaned(env); } public void testBin() throws Exception { - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("fake")); Path binDir = env.binFile().resolve("fake"); Files.createDirectories(binDir); Files.createFile(binDir.resolve("somescript")); - removePlugin("fake", env); + removePlugin("fake", home); assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); assertFalse(Files.exists(binDir)); @@ -97,14 +101,12 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testBinNotDir() throws Exception { - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("elasticsearch")); - UserError e = expectThrows(UserError.class, () -> { - removePlugin("elasticsearch", env); - }); + UserError e = expectThrows(UserError.class, () -> removePlugin("elasticsearch", home)); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertTrue(Files.exists(env.pluginsFile().resolve("elasticsearch"))); // did not remove assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); assertRemoveCleaned(env); } + } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 63c09890acc..f9cdf5b4f66 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -84,29 +84,10 @@ public class TribeUnitTests 
extends ESTestCase { tribe2 = null; } - public void testThatTribeClientsIgnoreGlobalSysProps() throws Exception { - System.setProperty("es.cluster.name", "tribe_node_cluster"); - System.setProperty("es.tribe.t1.cluster.name", "tribe1"); - System.setProperty("es.tribe.t2.cluster.name", "tribe2"); - System.setProperty("es.tribe.t1.node_id.seed", Long.toString(random().nextLong())); - System.setProperty("es.tribe.t2.node_id.seed", Long.toString(random().nextLong())); - - try { - assertTribeNodeSuccessfullyCreated(Settings.EMPTY); - } finally { - System.clearProperty("es.cluster.name"); - System.clearProperty("es.tribe.t1.cluster.name"); - System.clearProperty("es.tribe.t2.cluster.name"); - System.clearProperty("es.tribe.t1.node_id.seed"); - System.clearProperty("es.tribe.t2.node_id.seed"); - } - } - public void testThatTribeClientsIgnoreGlobalConfig() throws Exception { Path pathConf = getDataPath("elasticsearch.yml").getParent(); Settings settings = Settings .builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_CONF_SETTING.getKey(), pathConf) .build(); assertTribeNodeSuccessfullyCreated(settings); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 9976f072d42..6297ce244f9 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -75,7 +75,6 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { private static Client startClient(Path tempDir, TransportAddress... transportAddresses) { Settings clientSettings = Settings.builder() .put("node.name", "qa_smoke_client_" + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index 07fea76bd8b..362b5d60341 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -103,7 +103,7 @@ fi echo "CONF_FILE=$CONF_FILE" >> /etc/sysconfig/elasticsearch; fi - run_elasticsearch_service 1 -Ees.default.config="$CONF_FILE" + run_elasticsearch_service 1 -Edefault.config="$CONF_FILE" # remove settings again otherwise cleaning up before next testrun will fail if is_dpkg ; then diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 5f50dfc2850..c4dc8c96f58 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -340,7 +340,7 @@ run_elasticsearch_service() { local CONF_DIR="" local ES_PATH_CONF="" else - local ES_PATH_CONF="-Ees.path.conf=$CONF_DIR" + local ES_PATH_CONF="-Epath.conf=$CONF_DIR" fi # we must capture the exit code to compare so we don't want to start as background process in case we expect something other than 0 local background="" diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java new file mode 100644 index 00000000000..aa327ae2546 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.hamcrest.CoreMatchers.equalTo; + +abstract class ESElasticsearchCliTestCase extends ESTestCase { + + interface InitConsumer { + void accept(final boolean foreground, final String pidFile, final Map esSettings); + } + + void runTest( + final int expectedStatus, + final boolean expectedInit, + final Consumer outputConsumer, + final InitConsumer initConsumer, + String... 
args) throws Exception { + final MockTerminal terminal = new MockTerminal(); + try { + final AtomicBoolean init = new AtomicBoolean(); + final int status = Elasticsearch.main(args, new Elasticsearch() { + @Override + void init(final boolean daemonize, final String pidFile, final Map esSettings) { + init.set(true); + initConsumer.accept(!daemonize, pidFile, esSettings); + } + }, terminal); + assertThat(status, equalTo(expectedStatus)); + assertThat(init.get(), equalTo(expectedInit)); + outputConsumer.accept(terminal.getOutput()); + } catch (Throwable t) { + // if an unexpected exception is thrown, we log + // terminal output to aid debugging + logger.info(terminal.getOutput()); + // rethrow so the test fails + throw t; + } + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 1e75f3d8261..7875f8fd20b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -185,7 +185,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put("http.enabled", false) .put(Node.NODE_LOCAL_SETTING.getKey(), true) .put(Node.NODE_DATA_SETTING.getKey(), true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Node build = new MockNode(settings, getVersion(), getPlugins()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 4625aa77e25..efdd6bad90f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -51,7 +51,6 @@ import java.util.concurrent.TimeUnit; final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we need network mode for this diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 5372c319dae..71fe622d8c0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -73,7 +73,6 @@ public final class ExternalTestCluster extends TestCluster { Settings clientSettings = Settings.builder() .put(additionalSettings) .put("node.name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 3199a27b9a5..995ca5480f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -291,11 +291,10 @@ public final class InternalTestCluster extends TestCluster { builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put(TransportSettings.PORT.getKey(), TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); - builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true); builder.put(Node.NODE_MODE_SETTING.getKey(), nodeMode); builder.put("http.pipelining", enableHttpPipelining); - if (Strings.hasLength(System.getProperty("es.logger.level"))) { - builder.put("logger.level", System.getProperty("es.logger.level")); + if (Strings.hasLength(System.getProperty("tests.logger.level"))) { + builder.put("logger.level", System.getProperty("tests.logger.level")); } if (Strings.hasLength(System.getProperty("es.logger.prefix"))) { builder.put("logger.prefix", System.getProperty("es.logger.prefix")); @@ -319,14 +318,14 @@ public final class InternalTestCluster extends TestCluster { public static String configuredNodeMode() { Builder builder = Settings.builder(); - if (Strings.isEmpty(System.getProperty("es.node.mode")) && Strings.isEmpty(System.getProperty("es.node.local"))) { + if (Strings.isEmpty(System.getProperty("node.mode")) && Strings.isEmpty(System.getProperty("node.local"))) { return "local"; // default if nothing is specified } - if (Strings.hasLength(System.getProperty("es.node.mode"))) { - builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("es.node.mode")); + if (Strings.hasLength(System.getProperty("node.mode"))) { + builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("node.mode")); } - if (Strings.hasLength(System.getProperty("es.node.local"))) { - builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("es.node.local")); + if (Strings.hasLength(System.getProperty("node.local"))) { + builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("node.local")); } if (DiscoveryNode.isLocalNode(builder.build())) { return "local"; @@ -882,7 +881,6 @@ public final class InternalTestCluster extends TestCluster { .put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.exists(nodeSettings) ? 
Node.NODE_MODE_SETTING.get(nodeSettings) : nodeMode) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(settings); if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 6142edb9394..75bc916ab94 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -137,10 +137,10 @@ public class ReproduceInfoPrinter extends RunListener { } public ReproduceErrorMessageBuilder appendESProperties() { - appendProperties("es.logger.level"); + appendProperties("tests.logger.level"); if (inVerifyPhase()) { // these properties only make sense for integration tests - appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, + appendProperties("node.mode", "node.local", TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); } appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", diff --git a/test/framework/src/main/resources/log4j.properties b/test/framework/src/main/resources/log4j.properties index 22f54ef68e5..11a864df0f3 100644 --- a/test/framework/src/main/resources/log4j.properties +++ b/test/framework/src/main/resources/log4j.properties @@ -1,5 +1,5 @@ -es.logger.level=INFO -log4j.rootLogger=${es.logger.level}, out +tests.logger.level=INFO +log4j.rootLogger=${tests.logger.level}, out log4j.logger.org.apache.http=INFO, out log4j.additivity.org.apache.http=false From 223cb6a7f0a3403aa60b48886a1bda980bc28fed Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 19 May 2016 18:03:36 -0400 Subject: [PATCH 36/36] [reindex] Mark test awaits fix because it is unstable Fix coming. --- .../test/java/org/elasticsearch/index/reindex/RetryTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 0b557898552..ff821724316 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -43,6 +44,7 @@ import static org.hamcrest.Matchers.greaterThan; * Integration test for retry behavior. Useful because retrying relies on the way that the rest of Elasticsearch throws exceptions and unit * tests won't verify that. */ +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/18456") public class RetryTests extends ReindexTestCase { /** * The number of concurrent requests to test.