From a01e26a39baf0908cd30351c7f17e6a19d00276b Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 13 Jul 2018 13:13:21 +0100 Subject: [PATCH 001/260] Correct spelling of AnalysisPlugin#requriesAnalysisSettings (#32025) Because this is a static method on a public API, and one that we encourage plugin authors to use, the method with the typo is deprecated in 6.x rather than just renamed. --- .../migration/migrate_7_0/plugins.asciidoc | 5 ++++ .../analysis/common/CommonAnalysisPlugin.java | 26 +++++++++---------- .../indices/analysis/AnalysisModule.java | 4 +-- .../elasticsearch/plugins/AnalysisPlugin.java | 20 ++++++++++---- 4 files changed, 35 insertions(+), 20 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/plugins.asciidoc b/docs/reference/migration/migrate_7_0/plugins.asciidoc index 829a93573c9..f8434993078 100644 --- a/docs/reference/migration/migrate_7_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_7_0/plugins.asciidoc @@ -18,3 +18,8 @@ See {plugins}/repository-azure-repository-settings.html#repository-azure-reposit must now be specified in the client settings instead. See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings]. 
+ +==== Analysis Plugin changes + +* The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider provider)` has been +renamed to `requiresAnalysisSettings` \ No newline at end of file diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index ca2f74b5efe..d95af920a30 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -135,7 +135,7 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings; +import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings; public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { @@ -201,11 +201,11 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { filters.put("cjk_width", CJKWidthFilterFactory::new); filters.put("classic", ClassicFilterFactory::new); filters.put("czech_stem", CzechStemTokenFilterFactory::new); - filters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new)); + filters.put("common_grams", requiresAnalysisSettings(CommonGramsTokenFilterFactory::new)); filters.put("decimal_digit", DecimalDigitFilterFactory::new); filters.put("delimited_payload_filter", LegacyDelimitedPayloadTokenFilterFactory::new); filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new); - filters.put("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new)); + filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new)); filters.put("dutch_stem", DutchStemTokenFilterFactory::new); filters.put("edge_ngram", 
EdgeNGramTokenFilterFactory::new); filters.put("edgeNGram", EdgeNGramTokenFilterFactory::new); @@ -216,11 +216,11 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { filters.put("german_normalization", GermanNormalizationFilterFactory::new); filters.put("german_stem", GermanStemTokenFilterFactory::new); filters.put("hindi_normalization", HindiNormalizationFilterFactory::new); - filters.put("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new)); + filters.put("hyphenation_decompounder", requiresAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new)); filters.put("indic_normalization", IndicNormalizationFilterFactory::new); - filters.put("keep", requriesAnalysisSettings(KeepWordFilterFactory::new)); - filters.put("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new)); - filters.put("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new)); + filters.put("keep", requiresAnalysisSettings(KeepWordFilterFactory::new)); + filters.put("keep_types", requiresAnalysisSettings(KeepTypesFilterFactory::new)); + filters.put("keyword_marker", requiresAnalysisSettings(KeywordMarkerTokenFilterFactory::new)); filters.put("kstem", KStemTokenFilterFactory::new); filters.put("length", LengthTokenFilterFactory::new); filters.put("limit", LimitTokenCountFilterFactory::new); @@ -229,8 +229,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { filters.put("multiplexer", MultiplexerTokenFilterFactory::new); filters.put("ngram", NGramTokenFilterFactory::new); filters.put("nGram", NGramTokenFilterFactory::new); - filters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); - filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new)); + filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new)); + filters.put("pattern_replace", 
requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new)); filters.put("persian_normalization", PersianNormalizationFilterFactory::new); filters.put("porter_stem", PorterStemTokenFilterFactory::new); filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new); @@ -241,10 +241,10 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { filters.put("serbian_normalization", SerbianNormalizationFilterFactory::new); filters.put("snowball", SnowballTokenFilterFactory::new); filters.put("sorani_normalization", SoraniNormalizationFilterFactory::new); - filters.put("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new)); + filters.put("stemmer_override", requiresAnalysisSettings(StemmerOverrideTokenFilterFactory::new)); filters.put("stemmer", StemmerTokenFilterFactory::new); filters.put("trim", TrimTokenFilterFactory::new); - filters.put("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new)); + filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new)); filters.put("unique", UniqueTokenFilterFactory::new); filters.put("uppercase", UpperCaseTokenFilterFactory::new); filters.put("word_delimiter_graph", WordDelimiterGraphTokenFilterFactory::new); @@ -256,8 +256,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { public Map> getCharFilters() { Map> filters = new TreeMap<>(); filters.put("html_strip", HtmlStripCharFilterFactory::new); - filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new)); - filters.put("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new)); + filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceCharFilterFactory::new)); + filters.put("mapping", requiresAnalysisSettings(MappingCharFilterFactory::new)); return filters; } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java 
b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 364732dc183..1ecdc797073 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -54,7 +54,7 @@ import java.util.Locale; import java.util.Map; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings; +import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings; /** * Sets up {@link AnalysisRegistry}. @@ -118,7 +118,7 @@ public final class AnalysisModule { tokenFilters.register("stop", StopTokenFilterFactory::new); tokenFilters.register("standard", StandardTokenFilterFactory::new); tokenFilters.register("shingle", ShingleTokenFilterFactory::new); - tokenFilters.register("hunspell", requriesAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory + tokenFilters.register("hunspell", requiresAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory (indexSettings, name, settings, hunspellService))); tokenFilters.extractAndRegister(plugins, AnalysisPlugin::getTokenFilters); diff --git a/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java b/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java index e740fddc6ec..27b5667d3bb 100644 --- a/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java @@ -57,12 +57,12 @@ import static java.util.Collections.emptyMap; * } * * Elasticsearch doesn't have any automatic mechanism to share these components between indexes. If any component is heavy enough to warrant - * such sharing then it is the Pugin's responsibility to do it in their {@link AnalysisProvider} implementation. 
We recommend against doing + * such sharing then it is the Plugin's responsibility to do it in their {@link AnalysisProvider} implementation. We recommend against doing * this unless absolutely necessary because it can be difficult to get the caching right given things like behavior changes across versions. */ public interface AnalysisPlugin { /** - * Override to add additional {@link CharFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)} + * Override to add additional {@link CharFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)} * how to on get the configuration from the index. */ default Map> getCharFilters() { @@ -70,7 +70,7 @@ public interface AnalysisPlugin { } /** - * Override to add additional {@link TokenFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)} + * Override to add additional {@link TokenFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)} * how to on get the configuration from the index. */ default Map> getTokenFilters() { @@ -78,7 +78,7 @@ public interface AnalysisPlugin { } /** - * Override to add additional {@link Tokenizer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)} + * Override to add additional {@link Tokenizer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)} * how to on get the configuration from the index. */ default Map> getTokenizers() { @@ -86,7 +86,7 @@ public interface AnalysisPlugin { } /** - * Override to add additional {@link Analyzer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)} + * Override to add additional {@link Analyzer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)} * how to on get the configuration from the index. */ default Map>> getAnalyzers() { @@ -130,8 +130,18 @@ public interface AnalysisPlugin { /** * Mark an {@link AnalysisProvider} as requiring the index's settings. 
+ * + * @deprecated use {@link #requiresAnalysisSettings(AnalysisProvider)} */ + @Deprecated static AnalysisProvider requriesAnalysisSettings(AnalysisProvider provider) { + return requiresAnalysisSettings(provider); + } + + /** + * Mark an {@link AnalysisProvider} as requiring the index's settings. + */ + static AnalysisProvider requiresAnalysisSettings(AnalysisProvider provider) { return new AnalysisProvider() { @Override public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { From 92a9bb2b965fb401543aedf71ea7707da55e1a15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 13 Jul 2018 14:34:02 +0200 Subject: [PATCH 002/260] Re-instate link in StringFunctionUtils javadocs The previous errors in compileJava were not caused by the brackets but by the content of the @link section. Corrected this so it's a working javadoc link again. --- .../function/scalar/string/StringFunctionUtils.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java index 75db52ed099..cef826d37ed 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.string; abstract class StringFunctionUtils { /** - * Trims the trailing whitespace characters from the given String. Uses @link java.lang.Character.isWhitespace(char) + * Trims the trailing whitespace characters from the given String. Uses {@link Character#isWhitespace(char)} * to determine if a character is whitespace or not. 
* * @param s the original String @@ -27,7 +27,7 @@ abstract class StringFunctionUtils { } /** - * Trims the leading whitespace characters from the given String. Uses @link java.lang.Character.isWhitespace(char) + * Trims the leading whitespace characters from the given String. Uses {@link Character#isWhitespace(char)} * to determine if a character is whitespace or not. * * @param s the original String From 9a928756e979e6f2f1469aa00b89c2c050788d68 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 13 Jul 2018 15:40:38 +0200 Subject: [PATCH 003/260] Docs: Change formatting of Cloud options --- docs/reference/setup/install.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 7675e5ad146..4433ffb8c38 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -1,11 +1,16 @@ [[install-elasticsearch]] == Installing Elasticsearch +[float] +=== Hosted Elasticsearch Elasticsearch can be run on your own hardware or using our hosted Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is available on AWS and GCP. You can https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free. 
+[float] +=== Installing Elasticsearch Yourself + Elasticsearch is provided in the following package formats: [horizontal] From bc1284eb28a68cc2b5d18a3181ac72ff12804348 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 13 Jul 2018 15:48:14 +0200 Subject: [PATCH 004/260] Docs: Restyled cloud link in getting started --- docs/reference/getting-started.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index a29a743fed8..ff00c310a43 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -104,10 +104,13 @@ With that out of the way, let's get started with the fun part... == Installation +[TIP] +============== You can skip installation completely by using our hosted Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is available on AWS and GCP. You can https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free. +============== Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed): From b7f07f03edb9c5be2665d65eafc8fe9ad95d0fa1 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 13 Jul 2018 10:07:42 -0400 Subject: [PATCH 005/260] [Rollup] Use composite's missing_bucket (#31402) We can leverage the composite agg's new `missing_bucket` feature on terms groupings. This means the aggregation criteria used in the indexer will now return null buckets for missing keys. 
Because all buckets are now returned (even if a key is null), we can guarantee correct doc counts with "combined" jobs (where a job rolls up multiple schemas). This was previously impossible since composite would ignore documents that didn't have _all_ the keys, meaning non-overlapping schemas would cause composite to return no buckets. Note: date_histo does not use `missing_bucket`, since a timestamp is always required. The docs have been adjusted to recommend a single, combined job. It also makes reference to the previous issue to help users that are upgrading (rather than just deleting the sections). --- .../en/rollup/understanding-groups.asciidoc | 237 +----------------- .../core/rollup/job/DateHistoGroupConfig.java | 1 - .../core/rollup/job/HistoGroupConfig.java | 1 + .../core/rollup/job/TermsGroupConfig.java | 1 + .../rollup/RollupRestTestStateCleaner.java | 24 +- .../rollup/RollupResponseTranslator.java | 1 + .../action/TransportPutRollupJobAction.java | 8 + .../xpack/rollup/job/IndexerUtils.java | 15 +- .../xpack/rollup/job/RollupIndexer.java | 1 + .../RollupResponseTranslationTests.java | 48 ++++ .../action/PutJobStateMachineTests.java | 55 +++- .../xpack/rollup/job/IndexerUtilsTests.java | 106 +++++++- .../xpack/test/rest/XPackRestIT.java | 2 +- .../xpack/restart/FullClusterRestartIT.java | 61 ++++- 14 files changed, 298 insertions(+), 263 deletions(-) diff --git a/x-pack/docs/en/rollup/understanding-groups.asciidoc b/x-pack/docs/en/rollup/understanding-groups.asciidoc index f57f905ae04..803555b2d73 100644 --- a/x-pack/docs/en/rollup/understanding-groups.asciidoc +++ b/x-pack/docs/en/rollup/understanding-groups.asciidoc @@ -121,16 +121,15 @@ if a field is useful for aggregating later, and how you might wish to use it (te === Grouping Limitations with heterogeneous indices -There is a known limitation to Rollup groups, due to some internal implementation details at this time. The Rollup feature leverages -the `composite` aggregation from Elasticsearch. 
At the moment, the composite agg only returns buckets when all keys in the tuple are non-null. -Put another way, if the you request keys `[A,B,C]` in the composite aggregation, the only documents that are aggregated are those that have -_all_ of the keys `A, B` and `C`. +There was previously a limitation in how Rollup could handle indices that had heterogeneous mappings (multiple, unrelated/non-overlapping +mappings). The recommendation at the time was to configure a separate job per data "type". For example, you might configure a separate +job for each Beats module that you had enabled (one for `process`, another for `filesystem`, etc). -Because Rollup uses the composite agg during the indexing process, it inherits this behavior. Practically speaking, if all of the documents -in your index are homogeneous (they have the same mapping), you can ignore this limitation and stop reading now. +This recommendation was driven by internal implementation details that caused document counts to be potentially incorrect if a single "merged" +job was used. -However, if you have a heterogeneous collection of documents that you wish to roll up, you may need to configure two or more jobs to -accurately cover the original data. +This limitation has since been alleviated. As of 6.4.0, it is now considered best practice to combine all rollup configurations +into a single job. 
As an example, if your index has two types of documents: @@ -157,7 +156,7 @@ and -------------------------------------------------- // NOTCONSOLE -it may be tempting to create a single, combined rollup job which covers both of these document types, something like this: +the best practice is to combine them into a single rollup job which covers both of these document types, like this: [source,js] -------------------------------------------------- @@ -191,222 +190,10 @@ PUT _xpack/rollup/job/combined -------------------------------------------------- // NOTCONSOLE -You can see that it includes a `terms` grouping on both "node" and "title", fields that are mutually exclusive in the document types. -*This will not work.* Because the `composite` aggregation (and by extension, Rollup) only returns buckets when all keys are non-null, -and there are no documents that have both a "node" field and a "title" field, this rollup job will not produce any rollups. - -Instead, you should configure two independent jobs (sharing the same index, or going to separate indices): - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor -{ - "index_pattern": "data-*", - "rollup_index": "data_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node"] - } - }, - "metrics": [ - { - "field": "temperature", - "metrics": ["min", "max", "sum"] - } - ] -} --------------------------------------------------- -// NOTCONSOLE - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/purchases -{ - "index_pattern": "data-*", - "rollup_index": "data_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["title"] - } - }, - "metrics": [ - { - "field": "price", - "metrics": 
["avg"] - } - ] -} --------------------------------------------------- -// NOTCONSOLE - -Notice that each job now deals with a single "document type", and will not run into the limitations described above. We are working on changes -in core Elasticsearch to remove this limitation from the `composite` aggregation, and the documentation will be updated accordingly -when this particular scenario is fixed. - === Doc counts and overlapping jobs -There is an issue with doc counts, related to the above grouping limitation. Imagine you have two Rollup jobs saving to the same index, where -one job is a "subset" of another job. +There was previously an issue with document counts on "overlapping" job configurations, driven by the same internal implementation detail. +If there were two Rollup jobs saving to the same index, where one job is a "subset" of another job, it was possible that document counts +could be incorrect for certain aggregation arrangements. -For example, you might have jobs with these two groupings: - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor-all -{ - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node"] - } - }, - "metrics": [ - { - "field": "price", - "metrics": ["avg"] - } - ] - ... -} --------------------------------------------------- -// NOTCONSOLE - -and - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor-building -{ - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node", "building"] - } - } - ... -} --------------------------------------------------- -// NOTCONSOLE - - -The first job `sensor-all` contains the groupings and metrics that apply to all data in the index. The second job is rolling up a subset -of data (in different buildings) which also include a building identifier. 
You did this because combining them would run into the limitation -described in the previous section. - -This _mostly_ works, but can sometimes return incorrect `doc_counts` when you search. All metrics will be valid however. - -The issue arises from the composite agg limitation described before, combined with search-time optimization. Imagine you try to run the -following aggregation: - -[source,js] --------------------------------------------------- -"aggs" : { - "nodes": { - "terms": { - "field": "node" - } - } -} --------------------------------------------------- -// NOTCONSOLE - -This aggregation could be serviced by either `sensor-all` or `sensor-building` job, since they both group on the node field. So the RollupSearch -API will search both of them and merge results. This will result in *correct* doc_counts and *correct* metrics. No problem here. - -The issue arises from an aggregation that can _only_ be serviced by `sensor-building`, like this one: - -[source,js] --------------------------------------------------- -"aggs" : { - "nodes": { - "terms": { - "field": "node" - }, - "aggs": { - "building": { - "terms": { - "field": "building" - } - } - } - } -} --------------------------------------------------- -// NOTCONSOLE - -Now we run into a problem. The RollupSearch API will correctly identify that only `sensor-building` job has all the required components -to answer the aggregation, and will search it exclusively. Unfortunately, due to the composite aggregation limitation, that job only -rolled up documents that have both a "node" and a "building" field. Meaning that the doc_counts for the `"nodes"` aggregation will not -include counts for any document that doesn't have `[node, building]` fields. 
- -- The `doc_count` for `"nodes"` aggregation will be incorrect because it only contains counts for `nodes` that also have buildings -- The `doc_count` for `"buildings"` aggregation will be correct -- Any metrics, on any level, will be correct - -==== Workarounds - -There are two main workarounds if you find yourself with a schema like the above. - -Easiest and most robust method: use separate indices to store your rollups. The limitations arise because you have several document -schemas co-habitating in a single index, which makes it difficult for rollups to correctly summarize. If you make several rollup -jobs and store them in separate indices, these sorts of difficulties do not arise. It does, however, keep you from searching across several -different rollup indices at the same time. - -The other workaround is to include an "off-target" aggregation in the query, which pulls in the "superset" job and corrects the doc counts. -The RollupSearch API determines the best job to search for each "leaf node" in the aggregation tree. So if we include a metric agg on `price`, -which was only defined in the `sensor-all` job, that will "pull in" the other job: - -[source,js] --------------------------------------------------- -"aggs" : { - "nodes": { - "terms": { - "field": "node" - }, - "aggs": { - "building": { - "terms": { - "field": "building" - } - }, - "avg_price": { - "avg": { "field": "price" } <1> - } - } - } -} --------------------------------------------------- -// NOTCONSOLE -<1> Adding an avg aggregation here will fix the doc counts - -Because only `sensor-all` job had an `avg` on the price field, the RollupSearch API is forced to pull in that additional job for searching, -and will merge/correct the doc_counts as appropriate. This sort of workaround applies to any additional aggregation -- metric or bucketing -- -although it can be tedious to look through the jobs and determine the right one to add. 
- -==== Status - -We realize this is an onerous limitation, and somewhat breaks the rollup contract of "pick the fields to rollup, we do the rest". We are -actively working to get the limitation to `composite` agg fixed, and the related issues in Rollup. The documentation will be updated when -the fix is implemented. \ No newline at end of file +This issue has also since been eliminated in 6.4.0. \ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java index 4b4e4cf7b7c..4a9fbde61d6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java @@ -159,7 +159,6 @@ public class DateHistoGroupConfig implements Writeable, ToXContentFragment { vsBuilder.dateHistogramInterval(interval); vsBuilder.field(field); vsBuilder.timeZone(timeZone); - return Collections.singletonList(vsBuilder); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java index 8b8d53b4ce9..2b1511077d9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java @@ -96,6 +96,7 @@ public class HistoGroupConfig implements Writeable, ToXContentFragment { = new HistogramValuesSourceBuilder(RollupField.formatIndexerAggName(f, HistogramAggregationBuilder.NAME)); vsBuilder.interval(interval); vsBuilder.field(f); + vsBuilder.missingBucket(true); return vsBuilder; }).collect(Collectors.toList()); } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java index 2f1c35a73ed..da73020f008 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java @@ -80,6 +80,7 @@ public class TermsGroupConfig implements Writeable, ToXContentFragment { TermsValuesSourceBuilder vsBuilder = new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME)); vsBuilder.field(f); + vsBuilder.missingBucket(true); return vsBuilder; }).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java index 9938f3a4196..ae171f138cf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.core.rollup; import org.apache.http.HttpStatus; -import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -27,21 +26,13 @@ import static org.junit.Assert.assertEquals; public class RollupRestTestStateCleaner { - private final Logger logger; - private final RestClient adminClient; - - public RollupRestTestStateCleaner(Logger logger, RestClient adminClient) { - this.logger = logger; - this.adminClient = adminClient; - } - - public void clearRollupMetadata() throws Exception { - deleteAllJobs(); - waitForPendingTasks(); + public static void clearRollupMetadata(RestClient 
adminClient) throws Exception { + deleteAllJobs(adminClient); + waitForPendingTasks(adminClient); // indices will be deleted by the ESRestTestCase class } - private void waitForPendingTasks() throws Exception { + private static void waitForPendingTasks(RestClient adminClient) throws Exception { ESTestCase.assertBusy(() -> { try { Response response = adminClient.performRequest("GET", "/_cat/tasks", @@ -71,7 +62,7 @@ public class RollupRestTestStateCleaner { } @SuppressWarnings("unchecked") - private void deleteAllJobs() throws Exception { + private static void deleteAllJobs(RestClient adminClient) throws Exception { Response response = adminClient.performRequest("GET", "/_xpack/rollup/job/_all"); Map jobs = ESRestTestCase.entityAsMap(response); @SuppressWarnings("unchecked") @@ -83,9 +74,7 @@ public class RollupRestTestStateCleaner { } for (Map jobConfig : jobConfigs) { - logger.debug(jobConfig); String jobId = (String) ((Map) jobConfig.get("config")).get("id"); - logger.debug("Deleting job " + jobId); try { response = adminClient.performRequest("DELETE", "/_xpack/rollup/job/" + jobId); } catch (Exception e) { @@ -95,7 +84,8 @@ public class RollupRestTestStateCleaner { } private static String responseEntityToString(Response response) throws Exception { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), + StandardCharsets.UTF_8))) { return reader.lines().collect(Collectors.joining("\n")); } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ba1002896c0..4042e98ef93 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -382,6 +382,7 @@ public class RollupResponseTranslator { }); } else if (rolled instanceof StringTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { + BytesRef key = new BytesRef(bucket.getKeyAsString().getBytes(StandardCharsets.UTF_8)); assert bucketCount >= 0; //TODO expose getFormatter(), keyed upstream in Core diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index 081b97b4ee7..889dfa3ac8e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -173,6 +173,14 @@ public class TransportPutRollupJobAction extends TransportMasterNodeAction rollupMeta = (Map)((Map) m).get(RollupField.ROLLUP_META); + + String stringVersion = (String)((Map) m).get(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD); + if (stringVersion == null) { + listener.onFailure(new IllegalStateException("Could not determine version of existing rollup metadata for index [" + + indexName + "]")); + return; + } + if (rollupMeta.get(job.getConfig().getId()) != null) { String msg = "Cannot create rollup job [" + job.getConfig().getId() + "] because job was previously created (existing metadata)."; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index e180e34c4cc..efac4c2d61b 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -45,7 
+45,7 @@ class IndexerUtils { * @param rollupIndex The index that holds rollups for this job * @return A list of rolled documents derived from the response */ - static List processBuckets(CompositeAggregation agg, String rollupIndex, RollupJobStats stats, + static List processBuckets(CompositeAggregation agg, String rollupIndex, RollupJobStats stats, GroupConfig groupConfig, String jobId) { logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]"); @@ -80,6 +80,7 @@ class IndexerUtils { doc.put(k + "." + RollupField.COUNT_FIELD, count); if (k.endsWith("." + DateHistogramAggregationBuilder.NAME)) { + assert v != null; doc.put(k + "." + RollupField.TIMESTAMP, v); doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHisto().getInterval()); doc.put(k + "." + DateHistoGroupConfig.TIME_ZONE, groupConfig.getDateHisto().getTimeZone().toString()); @@ -87,10 +88,18 @@ class IndexerUtils { } else if (k.endsWith("." + HistogramAggregationBuilder.NAME)) { doc.put(k + "." + RollupField.VALUE, v); doc.put(k + "." + RollupField.INTERVAL, groupConfig.getHisto().getInterval()); - docID.update(Numbers.doubleToBytes((Double)v), 0, 8); + if (v == null) { + // Arbitrary value to update the doc ID with for nulls + docID.update(19); + } else { + docID.update(Numbers.doubleToBytes((Double) v), 0, 8); + } } else if (k.endsWith("." + TermsAggregationBuilder.NAME)) { doc.put(k + "." 
+ RollupField.VALUE, v); - if (v instanceof String) { + if (v == null) { + // Arbitrary value to update the doc ID with for nulls + docID.update(19); + } else if (v instanceof String) { byte[] vs = ((String) v).getBytes(StandardCharsets.UTF_8); docID.update(vs, 0, vs.length); } else if (v instanceof Long) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index a07f1e7d32e..1711c0e34eb 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -401,6 +401,7 @@ public abstract class RollupIndexer { composite.setMetaData(metadata); } composite.size(config.getPageSize()); + return composite; } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 7b03d8e8d03..98e3ad8197a 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -1082,6 +1082,54 @@ public class RollupResponseTranslationTests extends AggregatorTestCase { assertThat(unrolled.toString(), not(equalTo(responses.get(1).toString()))); } + public void testStringTermsNullValue() throws IOException { + TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms", ValueType.STRING) + .field("stringField"); + + TermsAggregationBuilder rollupTerms = new TermsAggregationBuilder("terms", ValueType.STRING) + .field("stringfield.terms." + RollupField.VALUE) + .subAggregation(new SumAggregationBuilder("terms." + COUNT_FIELD) + .field("stringfield.terms." 
+ RollupField.COUNT_FIELD)); + + KeywordFieldMapper.Builder nrBuilder = new KeywordFieldMapper.Builder("terms"); + KeywordFieldMapper.KeywordFieldType nrFTterm = nrBuilder.fieldType(); + nrFTterm.setHasDocValues(true); + nrFTterm.setName(nonRollupTerms.field()); + + KeywordFieldMapper.Builder rBuilder = new KeywordFieldMapper.Builder("terms"); + KeywordFieldMapper.KeywordFieldType rFTterm = rBuilder.fieldType(); + rFTterm.setHasDocValues(true); + rFTterm.setName(rollupTerms.field()); + + NumberFieldMapper.Builder valueBuilder = new NumberFieldMapper.Builder("terms." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG); + MappedFieldType rFTvalue = valueBuilder.fieldType(); + rFTvalue.setHasDocValues(true); + rFTvalue.setName("stringfield.terms." + RollupField.COUNT_FIELD); + + List responses = doQueries(new MatchAllDocsQuery(), + iw -> { + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + + // off target + Document doc = new Document(); + doc.add(new SortedSetDocValuesField("otherField", new BytesRef("other"))); + iw.addDocument(doc); + }, nonRollupTerms, + iw -> { + iw.addDocument(stringValueRollupDoc("abc", 3)); + }, rollupTerms, + new MappedFieldType[]{nrFTterm}, new MappedFieldType[]{rFTterm, rFTvalue}); + + InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); + + // The null_value placeholder should be removed from the response and not visible here + assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); + assertThat(unrolled.toString(), not(equalTo(responses.get(1).toString()))); + } + public void testLongTerms() throws IOException { TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms", ValueType.LONG) .field("longField"); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java index 64cf9d2e3fe..58fa9d4533b 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java @@ -28,9 +28,12 @@ import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.elasticsearch.xpack.rollup.Rollup; import org.mockito.ArgumentCaptor; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -203,6 +206,43 @@ public class PutJobStateMachineTests extends ESTestCase { verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); } + @SuppressWarnings("unchecked") + public void testNoMappingVersion() { + RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + + ActionListener testListener = ActionListener.wrap(response -> { + fail("Listener success should not have been triggered."); + }, e -> { + assertThat(e.getMessage(), equalTo("Could not determine version of existing rollup metadata for index [" + + job.getConfig().getRollupIndex() + "]")); + }); + + Logger logger = mock(Logger.class); + Client client = mock(Client.class); + + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); + doAnswer(invocation -> { + GetMappingsResponse response = mock(GetMappingsResponse.class); + Map m = new HashMap<>(2); + m.put(RollupField.ROLLUP_META, + Collections.singletonMap(job.getConfig().getId(), job.getConfig())); + MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME, + 
Collections.singletonMap("_meta", m)); + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(1); + builder.put(RollupField.TYPE_NAME, meta); + + ImmutableOpenMap.Builder> builder2 = ImmutableOpenMap.builder(1); + builder2.put(job.getConfig().getRollupIndex(), builder.build()); + + when(response.getMappings()).thenReturn(builder2.build()); + requestCaptor.getValue().onResponse(response); + return null; + }).when(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), requestCaptor.capture()); + + TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); + } + @SuppressWarnings("unchecked") public void testJobAlreadyInMapping() { RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); @@ -219,10 +259,12 @@ public class PutJobStateMachineTests extends ESTestCase { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { GetMappingsResponse response = mock(GetMappingsResponse.class); + Map m = new HashMap<>(2); + m.put(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, Version.V_6_4_0); + m.put(RollupField.ROLLUP_META, + Collections.singletonMap(job.getConfig().getId(), job.getConfig())); MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(job.getConfig().getId(), job.getConfig())))); + Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(1); builder.put(RollupField.TYPE_NAME, meta); @@ -258,9 +300,12 @@ public class PutJobStateMachineTests extends ESTestCase { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { GetMappingsResponse response = mock(GetMappingsResponse.class); + Map m = new 
HashMap<>(2); + m.put(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, Version.V_6_4_0); + m.put(RollupField.ROLLUP_META, + Collections.singletonMap(unrelatedJob.getId(), unrelatedJob)); MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(unrelatedJob.getId(), unrelatedJob)))); + Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(1); builder.put(RollupField.TYPE_NAME, meta); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 53421faa9bc..07ad0af7f1c 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -33,12 +34,13 @@ import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggre import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import 
org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTime; import org.mockito.stubbing.Answer; @@ -50,8 +52,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import static org.mockito.Mockito.mock; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class IndexerUtilsTests extends AggregatorTestCase { @@ -359,6 +361,106 @@ public class IndexerUtilsTests extends AggregatorTestCase { assertThat(docs.get(0).id(), equalTo("1237859798")); } + public void testNullKeys() { + CompositeAggregation composite = mock(CompositeAggregation.class); + + when(composite.getBuckets()).thenAnswer((Answer>) invocationOnMock -> { + List foos = new ArrayList<>(); + + CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class); + LinkedHashMap keys = new LinkedHashMap<>(3); + keys.put("bar.terms", null); + keys.put("abc.histogram", null); + when(bucket.getKey()).thenReturn(keys); + + Aggregations aggs = new Aggregations(Collections.emptyList()); + when(bucket.getAggregations()).thenReturn(aggs); + when(bucket.getDocCount()).thenReturn(1L); + + foos.add(bucket); + + return foos; + }); + + GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig(); + groupConfig.setHisto(ConfigTestHelpers.getHisto().setFields(Collections.singletonList("abc")).build()); + + List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig.build(), "foo"); + assertThat(docs.size(), equalTo(1)); + assertFalse(Strings.isNullOrEmpty(docs.get(0).id())); + } + + public void testMissingBuckets() throws IOException { + String indexName = randomAlphaOfLengthBetween(1, 10); + RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); + + String 
metricField = "metric_field"; + String valueField = "value_field"; + + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + + int numDocs = 10; + + for (int i = 0; i < numDocs; i++) { + Document document = new Document(); + + // Every other doc omit the valueField, so that we get some null buckets + if (i % 2 == 0) { + document.add(new SortedNumericDocValuesField(valueField, i)); + document.add(new LongPoint(valueField, i)); + } + document.add(new SortedNumericDocValuesField(metricField, i)); + document.add(new LongPoint(metricField, i)); + indexWriter.addDocument(document); + } + + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + valueFieldType.setName(valueField); + valueFieldType.setHasDocValues(true); + valueFieldType.setName(valueField); + + MappedFieldType metricFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + metricFieldType.setName(metricField); + metricFieldType.setHasDocValues(true); + metricFieldType.setName(metricField); + + // Setup the composite agg + TermsGroupConfig termsGroupConfig = new TermsGroupConfig.Builder().setFields(Collections.singletonList(valueField)).build(); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, + termsGroupConfig.toBuilders()).size(numDocs*2); + + MetricConfig metricConfig = new MetricConfig.Builder().setField(metricField).setMetrics(Collections.singletonList("max")).build(); + metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + + Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType, metricFieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + 
aggregator.postCollection(); + CompositeAggregation composite = (CompositeAggregation) aggregator.buildAggregation(0L); + indexReader.close(); + directory.close(); + + List docs = IndexerUtils.processBuckets(composite, indexName, stats, + ConfigTestHelpers.getGroupConfig().build(), "foo"); + + assertThat(docs.size(), equalTo(6)); + for (IndexRequest doc : docs) { + Map map = doc.sourceAsMap(); + Object value = map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.VALUE); + if (value == null) { + assertThat(map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(5)); + } else { + assertThat(map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1)); + } + } + } + interface Mock { List getBuckets(); } diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 412c75f0e63..f1d9eb1fb3f 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -263,7 +263,7 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { */ private void clearRollupState() throws Exception { if (isRollupTest()) { - new RollupRestTestStateCleaner(logger, adminClient()).clearRollupMetadata(); + RollupRestTestStateCleaner.clearRollupMetadata(adminClient()); } } diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 5276abdbfb1..ba6f9e91678 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -18,6 +18,7 @@ 
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; @@ -531,7 +532,10 @@ public class FullClusterRestartIT extends ESRestTestCase { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest)); - assertThat(ObjectPath.eval("jobs.0.status.job_state", getRollupJobResponse), expectedStates); + Map job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); + } // check that the rollup job is started using the Tasks API final Request taskRequest = new Request("GET", "_tasks"); @@ -547,15 +551,27 @@ public class FullClusterRestartIT extends ESRestTestCase { // check that the rollup job is started using the Cluster State API final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest)); - Map rollupJobTask = ObjectPath.eval("metadata.persistent_tasks.tasks.0", clusterStateResponse); - assertThat(ObjectPath.eval("id", rollupJobTask), equalTo("rollup-job-test")); + List> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse); - // Persistent task state field has been renamed in 6.4.0 from "status" to "state" - final String stateFieldName = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? 
"status" : "state"; + boolean hasRollupTask = false; + for (Map task : rollupJobTasks) { + if (ObjectPath.eval("id", task).equals(rollupJob)) { + hasRollupTask = true; + + // Persistent task state field has been renamed in 6.4.0 from "status" to "state" + final String stateFieldName + = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? "status" : "state"; + + final String jobStateField = "task.xpack/rollup/job." + stateFieldName + ".job_state"; + assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"), + ObjectPath.eval(jobStateField, task), expectedStates); + break; + } + } + if (hasRollupTask == false) { + fail("Expected persistent task for [" + rollupJob + "] but none found."); + } - final String jobStateField = "task.xpack/rollup/job." + stateFieldName + ".job_state"; - assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + rollupJobTask, - ObjectPath.eval(jobStateField, rollupJobTask), expectedStates); } private void waitForRollUpJob(final String rollupJob, final Matcher expectedStates) throws Exception { @@ -563,7 +579,34 @@ public class FullClusterRestartIT extends ESRestTestCase { final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); - assertThat(ObjectPath.eval("jobs.0.status.job_state", toMap(getRollupJobResponse)), expectedStates); + + Map job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); + } }, 30L, TimeUnit.SECONDS); } + + private Map getJob(Response response, String targetJobId) throws IOException { + return getJob(ESRestTestCase.entityAsMap(response), targetJobId); + } + + @SuppressWarnings("unchecked") + private Map getJob(Map jobsMap, String targetJobId) 
throws IOException { + + List> jobs = + (List>) XContentMapValues.extractValue("jobs", jobsMap); + + if (jobs == null) { + return null; + } + + for (Map job : jobs) { + String jobId = (String) ((Map) job.get("config")).get("id"); + if (jobId.equals(targetJobId)) { + return job; + } + } + return null; + } } From 1bf205f862a32640d4febcd61cea49a04e406fa0 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 13 Jul 2018 09:33:29 -0500 Subject: [PATCH 006/260] Test: Fix a second case of bad watch creation There was still a case with a null text that allowed for 0 attachments to be created. This commit ensures that greater than zero are created if the text is null. Otherwise, it uses the same logic to create 0 to 3 random attachments. Closes #31948 --- .../notification/slack/message/SlackMessageTests.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java index 14d732064e5..10544e464ac 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java @@ -461,7 +461,6 @@ public class SlackMessageTests extends ESTestCase { assertThat(parsed, equalTo(template)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31948") public void testTemplateRender() throws Exception { Settings settings = SlackMessageDefaultsTests.randomSettings(); SlackMessageDefaults defaults = new SlackMessageDefaults(settings); @@ -483,7 +482,9 @@ public class SlackMessageTests extends ESTestCase { templateBuilder.setText(randomAlphaOfLength(10)); } if (templateBuilder.text == null || randomBoolean()) { - int count = randomIntBetween(0, 
3); + // ensure at least one attachment in the event the text is null + int minimumAttachments = templateBuilder.text == null ? 1 : 0; + int count = randomIntBetween(minimumAttachments, 3); for (int i = 0; i < count; i++) { Attachment.Template.Builder attachmentBuilder = createRandomAttachmentTemplateBuilder(); templateBuilder.addAttachments(attachmentBuilder); From f9791cf158ad6403b2872897d45ff7c773269569 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 13 Jul 2018 15:49:26 +0100 Subject: [PATCH 007/260] Remove deprecated AnalysisPlugin#requriesAnalysisSettings method (#32037) --- .../java/org/elasticsearch/plugins/AnalysisPlugin.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java b/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java index 27b5667d3bb..c85981f8dcb 100644 --- a/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java @@ -128,16 +128,6 @@ public interface AnalysisPlugin { return emptyMap(); } - /** - * Mark an {@link AnalysisProvider} as requiring the index's settings. - * - * @deprecated use {@link #requiresAnalysisSettings(AnalysisProvider)} - */ - @Deprecated - static AnalysisProvider requriesAnalysisSettings(AnalysisProvider provider) { - return requiresAnalysisSettings(provider); - } - /** * Mark an {@link AnalysisProvider} as requiring the index's settings. 
*/ From 80492cacfcadbb0ea513914e1012a725f8739b55 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Fri, 13 Jul 2018 11:40:03 -0400 Subject: [PATCH 008/260] Add second level of field collapsing (#31808) * Put second level collapse under inner_hits Closes #24855 --- .../search/request/collapse.asciidoc | 102 +++++++++++++ .../search/115_multiple_field_collapsing.yml | 141 ++++++++++++++++++ .../action/search/ExpandSearchPhase.java | 8 +- .../index/query/InnerHitBuilder.java | 49 +++++- 4 files changed, 296 insertions(+), 4 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml diff --git a/docs/reference/search/request/collapse.asciidoc b/docs/reference/search/request/collapse.asciidoc index 97d85329330..192495e5d6d 100644 --- a/docs/reference/search/request/collapse.asciidoc +++ b/docs/reference/search/request/collapse.asciidoc @@ -116,3 +116,105 @@ The default is based on the number of data nodes and the default search thread p WARNING: `collapse` cannot be used in conjunction with <>, <> or <>. + +==== Second level of collapsing + +Second level of collapsing is also supported and is applied to `inner_hits`. +For example, the following request finds the top scored tweets for +each country, and within each country finds the top scored tweets +for each user. + +[source,js] +-------------------------------------------------- +GET /twitter/_search +{ + "query": { + "match": { + "message": "elasticsearch" + } + }, + "collapse" : { + "field" : "country", + "inner_hits" : { + "name": "by_location", + "collapse" : {"field" : "user"}, + "size": 3 + } + } +} +-------------------------------------------------- +// NOTCONSOLE + + +Response: +[source,js] +-------------------------------------------------- +{ + ... 
+ "hits": [ + { + "_index": "twitter", + "_type": "_doc", + "_id": "9", + "_score": ..., + "_source": {...}, + "fields": {"country": ["UK"]}, + "inner_hits":{ + "by_location": { + "hits": { + ..., + "hits": [ + { + ... + "fields": {"user" : ["user124"]} + }, + { + ... + "fields": {"user" : ["user589"]} + }, + { + ... + "fields": {"user" : ["user001"]} + } + ] + } + } + } + }, + { + "_index": "twitter", + "_type": "_doc", + "_id": "1", + "_score": .., + "_source": {...}, + "fields": {"country": ["Canada"]}, + "inner_hits":{ + "by_location": { + "hits": { + ..., + "hits": [ + { + ... + "fields": {"user" : ["user444"]} + }, + { + ... + "fields": {"user" : ["user1111"]} + }, + { + ... + "fields": {"user" : ["user999"]} + } + ] + } + } + } + + }, + .... + ] +} +-------------------------------------------------- +// NOTCONSOLE + +NOTE: Second level of of collapsing doesn't allow `inner_hits`. \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml new file mode 100644 index 00000000000..212ce6785a1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml @@ -0,0 +1,141 @@ +--- +"two levels fields collapsing": + - skip: + version: " - 6.99.99" + reason: using multiple field collapsing from 7.0 on + - do: + indices.create: + index: addresses + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + _doc: + properties: + country: {"type": "keyword"} + city: {"type": "keyword"} + address: {"type": "text"} + + - do: + bulk: + refresh: true + body: + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "1" } }' + - '{"country" : "Canada", "city" : "Saskatoon", "address" : "701 Victoria Avenue" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "2" } }' + - '{"country" : "Canada", "city" : 
"Toronto", "address" : "74 Victoria Street, Suite, 74 Victoria Street, Suite 300" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "3" } }' + - '{"country" : "Canada", "city" : "Toronto", "address" : "350 Victoria St" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "4" } }' + - '{"country" : "Canada", "city" : "Toronto", "address" : "20 Victoria Street" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "5" } }' + - '{"country" : "UK", "city" : "London", "address" : "58 Victoria Street" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "6" } }' + - '{"country" : "UK", "city" : "London", "address" : "Victoria Street Victoria Palace Theatre" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "7" } }' + - '{"country" : "UK", "city" : "Manchester", "address" : "75 Victoria street Westminster" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "8" } }' + - '{"country" : "UK", "city" : "London", "address" : "Victoria Station Victoria Arcade" }' + + + # ************* error if internal collapse contains inner_hits + - do: + catch: /parse_exception/ + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + collapse: + field : city + inner_hits: {} + + + # ************* error if internal collapse contains another collapse + - do: + catch: /parse_exception/ + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + collapse: + field : city + collapse: { field: city } + + + + # ************* top scored + - do: + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + name: by_location + size: 3 + collapse: + field : city + + - match: { hits.total: 8 } + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields.country: ["UK"] } + - 
match: { hits.hits.0.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.0.inner_hits.by_location.hits.hits : 2} + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0._id: "8" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0.fields.city: ["London"] } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1._id: "7" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1.fields.city: ["Manchester"] } + + - match: { hits.hits.1.fields.country: ["Canada"] } + - match: { hits.hits.1.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.1.inner_hits.by_location.hits.hits : 2 } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0._id: "1" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0.fields.city: ["Saskatoon"] } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1._id: "3" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1.fields.city: ["Toronto"] } + + + # ************* sorted + - do: + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + name: by_location + size: 3 + sort: [{ "city": "desc" }] + collapse: + field : city + + - match: { hits.total: 8 } + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields.country: ["UK"] } + - match: { hits.hits.0.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.0.inner_hits.by_location.hits.hits : 2} + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0._id: "7" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0.fields.city: ["Manchester"] } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1._id: "5" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1.fields.city: ["London"] } + + - 
match: { hits.hits.1.fields.country: ["Canada"] } + - match: { hits.hits.1.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.1.inner_hits.by_location.hits.hits : 2 } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0._id: "2" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0.fields.city: ["Toronto"] } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1._id: "1" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1.fields.city: ["Saskatoon"] } diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index a6a99137dc9..917ff06c573 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -87,7 +87,8 @@ final class ExpandSearchPhase extends SearchPhase { groupQuery.must(origQuery); } for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { - SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder) + CollapseBuilder innerCollapseBuilder = innerHitBuilder.getInnerCollapseBuilder(); + SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder, innerCollapseBuilder) .query(groupQuery) .postFilter(searchRequest.source().postFilter()); SearchRequest groupRequest = buildExpandSearchRequest(searchRequest, sourceBuilder); @@ -135,7 +136,7 @@ final class ExpandSearchPhase extends SearchPhase { return groupRequest; } - private SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder options) { + private SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder options, CollapseBuilder innerCollapseBuilder) { SearchSourceBuilder groupSource = new SearchSourceBuilder(); groupSource.from(options.getFrom()); groupSource.size(options.getSize()); @@ -167,6 +168,9 @@ 
final class ExpandSearchPhase extends SearchPhase { groupSource.explain(options.isExplain()); groupSource.trackScores(options.isTrackScores()); groupSource.version(options.isVersion()); + if (innerCollapseBuilder != null) { + groupSource.collapse(innerCollapseBuilder); + } return groupSource; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 92da1bc3b65..6bdc55d31cd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFor import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; import java.io.IOException; import java.util.ArrayList; @@ -55,6 +56,8 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { public static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped"); public static final QueryBuilder DEFAULT_INNER_HIT_QUERY = new MatchAllQueryBuilder(); + public static final ParseField COLLAPSE_FIELD = new ParseField("collapse"); + public static final ParseField FIELD_FIELD = new ParseField("field"); private static final ObjectParser PARSER = new ObjectParser<>("inner_hits", InnerHitBuilder::new); @@ -91,6 +94,28 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { }, SearchSourceBuilder._SOURCE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); PARSER.declareObject(InnerHitBuilder::setHighlightBuilder, (p, c) -> HighlightBuilder.fromXContent(p), SearchSourceBuilder.HIGHLIGHT_FIELD); + PARSER.declareField((parser, 
builder, context) -> { + Boolean isParsedCorrectly = false; + String field; + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + if (FIELD_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + if (parser.nextToken() == XContentParser.Token.VALUE_STRING){ + field = parser.text(); + if (parser.nextToken() == XContentParser.Token.END_OBJECT){ + isParsedCorrectly = true; + CollapseBuilder cb = new CollapseBuilder(field); + builder.setInnerCollapse(cb); + } + } + } + } + } + if (isParsedCorrectly == false) { + throw new ParsingException(parser.getTokenLocation(), "Invalid token in the inner collapse"); + } + + }, COLLAPSE_FIELD, ObjectParser.ValueType.OBJECT); } private String name; @@ -109,6 +134,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { private Set scriptFields; private HighlightBuilder highlightBuilder; private FetchSourceContext fetchSourceContext; + private CollapseBuilder innerCollapseBuilder = null; public InnerHitBuilder() { this.name = null; @@ -173,6 +199,9 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { boolean hasChildren = in.readBoolean(); assert hasChildren == false; } + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new); + } } @Override @@ -218,6 +247,9 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { } } out.writeOptionalWriteable(highlightBuilder); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalWriteable(innerCollapseBuilder); + } } /** @@ -501,6 +533,15 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { return query; } + public InnerHitBuilder setInnerCollapse(CollapseBuilder innerCollapseBuilder) { + this.innerCollapseBuilder = innerCollapseBuilder; + return this; + } + + public CollapseBuilder 
getInnerCollapseBuilder() { + return innerCollapseBuilder; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -550,6 +591,9 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { if (highlightBuilder != null) { builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder, params); } + if (innerCollapseBuilder != null) { + builder.field(COLLAPSE_FIELD.getPreferredName(), innerCollapseBuilder); + } builder.endObject(); return builder; } @@ -572,13 +616,14 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { Objects.equals(scriptFields, that.scriptFields) && Objects.equals(fetchSourceContext, that.fetchSourceContext) && Objects.equals(sorts, that.sorts) && - Objects.equals(highlightBuilder, that.highlightBuilder); + Objects.equals(highlightBuilder, that.highlightBuilder) && + Objects.equals(innerCollapseBuilder, that.innerCollapseBuilder); } @Override public int hashCode() { return Objects.hash(name, ignoreUnmapped, from, size, explain, version, trackScores, - storedFieldsContext, docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder); + storedFieldsContext, docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, innerCollapseBuilder); } public static InnerHitBuilder fromXContent(XContentParser parser) throws IOException { From 82cdb574cf746ef2c4336181a8399b8bcb11ff3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 13 Jul 2018 17:41:28 +0200 Subject: [PATCH 009/260] Mute ML AutodetectMemoryLimitIT#testTooManyPartitions on Windows (#32044) Adding assumption to not run this test on Windows temporarily. 
Relates to #32033 --- .../xpack/ml/integration/AutodetectMemoryLimitIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java index 2a332fb05d5..03860ea9ae0 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.integration; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; @@ -38,6 +39,7 @@ public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { } public void testTooManyPartitions() throws Exception { + assumeFalse("AwaitsFix(bugUrl = \"https://github.com/elastic/elasticsearch/issues/32033\")", Constants.WINDOWS); Detector.Builder detector = new Detector.Builder("count", null); detector.setPartitionFieldName("user"); From c1a81e552fa014c5654c42bcda002eae7d10236c Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 13 Jul 2018 11:12:03 -0500 Subject: [PATCH 010/260] Watcher: cleanup ensureWatchExists use (#31926) Previously, the ensureWatchExists was overridable. This commit makes it final so that it cannot be overridden, and cleans up some redundant code in the process. 
--- .../execution/WatchExecutionContext.java | 2 +- .../execution/ManualExecutionContext.java | 21 +++++-------------- 2 files changed, 6 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java index 4cdd4bb0e35..62216ff681e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java @@ -82,7 +82,7 @@ public abstract class WatchExecutionContext { return watch; } - public void ensureWatchExists(CheckedSupplier supplier) throws Exception { + public final void ensureWatchExists(CheckedSupplier supplier) throws Exception { if (watch == null) { watch = supplier.get(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java index c161b24e856..abf1e5aec0d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.execution; -import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.watcher.actions.Action; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; @@ -29,18 +28,19 @@ public class ManualExecutionContext extends WatchExecutionContext { private final Map actionModes; private final boolean recordExecution; private final boolean knownWatch; - private final Watch watch; ManualExecutionContext(Watch watch, boolean 
knownWatch, DateTime executionTime, ManualTriggerEvent triggerEvent, TimeValue defaultThrottlePeriod, Input.Result inputResult, Condition.Result conditionResult, - Map actionModes, boolean recordExecution) { + Map actionModes, boolean recordExecution) throws Exception { super(watch.id(), executionTime, triggerEvent, defaultThrottlePeriod); this.actionModes = actionModes; this.recordExecution = recordExecution; this.knownWatch = knownWatch; - this.watch = watch; + + // set the watch early to ensure calls to watch() below succeed. + super.ensureWatchExists(() -> watch); if (inputResult != null) { onInputResult(inputResult); @@ -66,12 +66,6 @@ public class ManualExecutionContext extends WatchExecutionContext { } } - // a noop operation, as the watch is already loaded via ctor - @Override - public void ensureWatchExists(CheckedSupplier supplier) throws Exception { - super.ensureWatchExists(() -> watch); - } - @Override public boolean knownWatch() { return knownWatch; @@ -107,11 +101,6 @@ public class ManualExecutionContext extends WatchExecutionContext { return recordExecution; } - @Override - public Watch watch() { - return watch; - } - public static Builder builder(Watch watch, boolean knownWatch, ManualTriggerEvent event, TimeValue defaultThrottlePeriod) { return new Builder(watch, knownWatch, event, defaultThrottlePeriod); } @@ -173,7 +162,7 @@ public class ManualExecutionContext extends WatchExecutionContext { return this; } - public ManualExecutionContext build() { + public ManualExecutionContext build() throws Exception { if (executionTime == null) { executionTime = DateTime.now(DateTimeZone.UTC); } From bf7689071b4e580cc3056ac231c0329fabd7f01d Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 13 Jul 2018 11:13:10 -0500 Subject: [PATCH 011/260] Add secure setting for watcher email password (#31620) Other watcher actions already account for secure settings in their sensitive settings, whereas the email sending action did not. 
This adds the ability to optionally set a secure_password for email accounts. --- .../watcher/notification/email/Account.java | 40 ++++++++++++++++--- .../notification/email/EmailService.java | 10 ++++- .../notification/email/AccountTests.java | 30 +++++++++++++- 3 files changed, 72 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java index 8ba8d030524..02c0e1167dd 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.watcher.notification.email; import org.apache.logging.log4j.Logger; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; @@ -24,10 +27,13 @@ import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.Properties; +import java.util.Set; public class Account { static final String SMTP_PROTOCOL = "smtp"; + private static final String SMTP_PASSWORD = "password"; + private static final Setting SECURE_PASSWORD_SETTING = SecureSetting.secureString("secure_" + SMTP_PASSWORD, null); static { SecurityManager sm = System.getSecurityManager(); @@ -101,7 +107,7 @@ public class Account { if (auth != null && auth.password() != null) { password = new String(auth.password().text(cryptoService)); } else if (config.smtp.password != null) { - password = new 
String(config.smtp.password); + password = new String(config.smtp.password.getChars()); } if (profile == null) { @@ -199,18 +205,40 @@ public class Account { final String host; final int port; final String user; - final char[] password; + final SecureString password; final Properties properties; Smtp(Settings settings) { host = settings.get("host", settings.get("localaddress", settings.get("local_address"))); + port = settings.getAsInt("port", settings.getAsInt("localport", settings.getAsInt("local_port", 25))); user = settings.get("user", settings.get("from", null)); - String passStr = settings.get("password", null); - password = passStr != null ? passStr.toCharArray() : null; + password = getSecureSetting(SMTP_PASSWORD, settings, SECURE_PASSWORD_SETTING); + //password = passStr != null ? passStr.toCharArray() : null; properties = loadSmtpProperties(settings); } + /** + * Finds a setting, and then a secure setting if the setting is null, or returns null if one does not exist. This differs + * from other getSetting calls in that it allows for null whereas the other methods throw an exception. + * + * Note: if your setting was not previously secure, than the string reference that is in the setting object is still + * insecure. This is only constructing a new SecureString with the char[] of the insecure setting. + */ + private static SecureString getSecureSetting(String settingName, Settings settings, Setting secureSetting) { + String value = settings.get(settingName); + if (value == null) { + SecureString secureString = secureSetting.get(settings); + if (secureString != null && secureString.length() > 0) { + return secureString; + } else { + return null; + } + } else { + return new SecureString(value.toCharArray()); + } + } + /** * loads the standard Java Mail properties as settings from the given account settings. 
* The standard settings are not that readable, therefore we enabled the user to configure @@ -231,7 +259,9 @@ public class Account { settings = builder.build(); Properties props = new Properties(); - for (String key : settings.keySet()) { + // Secure strings cannot be retrieved out of a settings object and should be handled differently + Set insecureSettings = settings.filter(s -> s.startsWith("secure_") == false).keySet(); + for (String key : insecureSettings) { props.setProperty(SMTP_SETTINGS_PREFIX + key, settings.get(key)); } return props; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java index 3d2ea583edd..15859a5e044 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.watcher.notification.email; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -63,6 +65,10 @@ public class EmailService extends NotificationService { Setting.affixKeySetting("xpack.notification.email.account.", "smtp.password", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + private static final Setting.AffixSetting SETTING_SECURE_PASSWORD = + Setting.affixKeySetting("xpack.notification.email.account.", "smtp.secure_password", + (key) -> SecureSetting.secureString(key, null)); + private static final Setting.AffixSetting 
SETTING_SMTP_TIMEOUT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.timeout", (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)); @@ -111,6 +117,7 @@ public class EmailService extends NotificationService { clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PORT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_USER, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PASSWORD, (s, o) -> {}, (s, o) -> {}); + clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_PASSWORD, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {}); @@ -172,7 +179,8 @@ public class EmailService extends NotificationService { return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE, SETTING_EMAIL_DEFAULTS, SETTING_SMTP_AUTH, SETTING_SMTP_HOST, SETTING_SMTP_PASSWORD, SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED, SETTING_SMTP_TIMEOUT, SETTING_SMTP_CONNECTION_TIMEOUT, SETTING_SMTP_WRITE_TIMEOUT, SETTING_SMTP_LOCAL_ADDRESS, - SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS); + SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS, + SETTING_SECURE_PASSWORD); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java index 8e83d30ffa5..1cbaecef8fe 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.notification.email; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -16,7 +17,6 @@ import org.junit.Before; import javax.mail.Address; import javax.mail.Message; import javax.mail.internet.InternetAddress; - import java.util.Properties; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -149,7 +149,7 @@ public class AccountTests extends ESTestCase { assertThat(config.smtp.host, is(host)); assertThat(config.smtp.user, is(user)); if (password != null) { - assertThat(config.smtp.password, is(password.toCharArray())); + assertThat(config.smtp.password.getChars(), is(password.toCharArray())); } else { assertThat(config.smtp.password, nullValue()); } @@ -292,4 +292,30 @@ public class AccountTests extends ESTestCase { .build()), null, logger); }); } + + public void testEnsurePasswordSetAsSecureSetting() { + String password = "password"; + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("smtp.secure_password", password); + + Settings settings = Settings.builder() + .put("smtp.host", "localhost") + .put("smtp.port", server.port()) + .put("smtp.connection_timeout", TimeValue.timeValueMinutes(4)) + .setSecureSettings(secureSettings) + .build(); + + Account.Config config = new Account.Config("default", settings); + assertThat(config.smtp.password.getChars(), equalTo(password.toCharArray())); + + settings = Settings.builder() + .put("smtp.host", "localhost") + .put("smtp.port", server.port()) + .put("smtp.connection_timeout", TimeValue.timeValueMinutes(4)) + .put("smtp.password", password) + .build(); + + config = new Account.Config("default", settings); + 
assertThat(config.smtp.password.getChars(), equalTo(password.toCharArray())); + } } From 2c3ea43f45a6364b215acca428f29a7af7f66d10 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 13 Jul 2018 09:33:27 -0700 Subject: [PATCH 012/260] HLRC: Add xpack usage api (#31975) This commit adds the _xpack/usage api to the high level rest client. Currently in the transport api, the usage data is exposed in a limited fashion, at most giving one level of helper methods for the inner keys of data, but then exposing those subobjects as maps of objects. Rather than making parsers for every set of usage data from each feature, this PR exposes the entire set of usage data as a map of maps. --- .../client/RequestConverters.java | 8 +++ .../org/elasticsearch/client/XPackClient.java | 23 ++++++++ .../MiscellaneousDocumentationIT.java | 49 ++++++++++++++++ .../miscellaneous/x-pack-usage.asciidoc | 54 ++++++++++++++++++ .../action/TransportXPackUsageAction.java | 1 + .../xpack/core/action/XPackUsageRequest.java | 18 ------ .../core/action/XPackUsageRequestBuilder.java | 1 + .../cluster/ClusterStatsCollectorTests.java | 2 +- .../watcher/WatcherXpackUsageStatsTests.java | 2 +- .../protocol/xpack/XPackUsageRequest.java | 31 ++++++++++ .../protocol/xpack/XPackUsageResponse.java | 57 +++++++++++++++++++ 11 files changed, 226 insertions(+), 20 deletions(-) create mode 100644 docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 126a9c7d4b4..9dbd4916c77 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -106,6 +106,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -1096,6 +1097,13 @@ final class RequestConverters { return request; } + static Request xpackUsage(XPackUsageRequest usageRequest) { + Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); + Params parameters = new Params(request); + parameters.withMasterTimeout(usageRequest.masterNodeTimeout()); + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index 5942bfa35a4..a497619b987 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -22,6 +22,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import 
org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; @@ -70,4 +72,25 @@ public final class XPackClient { restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, XPackInfoResponse::fromXContent, listener, emptySet()); } + + /** + * Fetch usage information about X-Pack features from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xpackUsage, options, + XPackUsageResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously fetch usage information about X-Pack features from the cluster. + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options, + XPackUsageResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index 75c14097c45..a99b991620a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -35,12 +35,17 @@ import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; import java.util.EnumSet; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.is; + /** * Documentation for miscellaneous APIs in the high level java client. * Code wrapped in {@code tag} and {@code end} tags is included in the docs. 
@@ -129,6 +134,50 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase } } + public void testXPackUsage() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + //tag::x-pack-usage-execute + XPackUsageRequest request = new XPackUsageRequest(); + XPackUsageResponse response = client.xpack().usage(request, RequestOptions.DEFAULT); + //end::x-pack-usage-execute + + //tag::x-pack-usage-response + Map> usages = response.getUsages(); + Map monitoringUsage = usages.get("monitoring"); + assertThat(monitoringUsage.get("available"), is(true)); + assertThat(monitoringUsage.get("enabled"), is(true)); + assertThat(monitoringUsage.get("collection_enabled"), is(false)); + //end::x-pack-usage-response + } + { + XPackUsageRequest request = new XPackUsageRequest(); + // tag::x-pack-usage-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(XPackUsageResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-usage-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-usage-execute-async + client.xpack().usageAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-usage-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testInitializationFromClientBuilder() throws IOException { //tag::rest-high-level-client-init RestHighLevelClient client = new RestHighLevelClient( diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc new file mode 100644 index 00000000000..0927ae71c0b --- /dev/null +++ b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc @@ -0,0 +1,54 @@ +[[java-rest-high-x-pack-usage]] +=== X-Pack Usage API + 
+[[java-rest-high-x-pack-usage-execution]] +==== Execution + +Detailed information about the usage of features from {xpack} can be +retrieved using the `usage()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-info-response]] +==== Response + +The returned `XPackUsageResponse` contains a `Map` keyed by feature name. +Every feature map has an `available` key, indicating whether that +feature is available given the current license, and an `enabled` key, +indicating whether that feature is currently enabled. Other keys +are specific to each feature. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-response] +-------------------------------------------------- + +[[java-rest-high-x-pack-usage-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute-async] +-------------------------------------------------- +<1> The call to execute the usage api and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. 
+ +A typical listener for `XPackUsageResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index f3abad5e68b..6b7d5b96d20 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java deleted file mode 100644 index d578249c147..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.action; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeRequest; - -public class XPackUsageRequest extends MasterNodeRequest { - - @Override - public ActionRequestValidationException validate() { - return null; - } - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java index 789460f1339..92c2ba75ec1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; public class XPackUsageRequestBuilder extends MasterNodeOperationRequestBuilder { diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java index 6784b00361b..49355d51495 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.license.LicenseService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import 
org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java index 3a314640d74..7c07c98eb47 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher; import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.watcher.WatcherFeatureSetUsage; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java new file mode 100644 index 00000000000..f5f6d9d949b --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeRequest; + +public class XPackUsageRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java new file mode 100644 index 00000000000..3459403bd61 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Response object from calling the xpack usage api. + * + * Usage information for each feature is accessible through {@link #getUsages()}. + */ +public class XPackUsageResponse { + + private final Map> usages; + + private XPackUsageResponse(Map> usages) throws IOException { + this.usages = usages; + } + + @SuppressWarnings("unchecked") + private static Map castMap(Object value) { + return (Map)value; + } + + /** Return a map from feature name to usage information for that feature. */ + public Map> getUsages() { + return usages; + } + + public static XPackUsageResponse fromXContent(XContentParser parser) throws IOException { + Map rawMap = parser.map(); + Map> usages = rawMap.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> castMap(e.getValue()))); + return new XPackUsageResponse(usages); + } +} From 0edb096eb4763e2149c6c67aa8842198ecefb108 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Fri, 13 Jul 2018 18:08:35 +0100 Subject: [PATCH 013/260] Adds a new auto-interval date histogram (#28993) * Adds a new auto-interval date histogram This change adds a new type of histogram aggregation called `auto_date_histogram` where you can specify the target number of buckets you require and it will find an appropriate interval for the returned buckets. The aggregation works by first collecting documents in buckets at second interval, when it has created more than the target number of buckets it merges these buckets into minute interval bucket and continues collecting until it reaches the target number of buckets again. 
It will keep merging buckets when it exceeds the target until either collection is finished or the highest interval (currently years) is reached. A similar process happens at reduce time. This aggregation intentionally does not support min_doc_count, offset and extended_bounds to keep the already complex logic from becoming more complex. The aggregation accepts sub-aggregations but will always operate in `breadth_first` mode deferring the computation of sub-aggregations until the final buckets from the shard are known. min_doc_count is effectively hard-coded to zero meaning that we will insert empty buckets where necessary. Closes #9572 * Adds documentation * Added sub aggregator test * Fixes failing docs test * Brings branch up to date with master changes * trying to get tests to pass again * Fixes multiBucketConsumer accounting * Collects more buckets than needed on shards This gives us more options at reduce time in terms of how we do the final merge of the buckets to produce the final result * Revert "Collects more buckets than needed on shards" This reverts commit 993c782d117892af9a3c86a51921cdee630a3ac5. * Adds ability to merge within a rounding * Fixes non-timezone doc test failure * Fix time zone tests * iterates on tests * Adds test case and documentation changes Added some notes in the documentation about the intervals that can be returned. Also added a test case that utilises the merging of consecutive buckets * Fixes performance bug The bug meant that getAppropriate rounding took a huge amount of time if the range of the data was large but also sparsely populated. In these situations the rounding would be very low so iterating through the rounding values from the min key to the max key took a long time (~120 seconds in one test). 
The solution is to add a rough estimate first which chooses the rounding based just on the long values of the min and max keys alone but selects the rounding one lower than the one it thinks is appropriate so the accurate method can choose the final rounding taking into account the fact that intervals are not always fixed length. The commit also adds more tests * Changes to only do complex reduction on final reduce * merge latest with master * correct tests and add a new test case for 10k buckets * refactor to perform bucket number check in innerBuild * correctly derive bucket setting, update tests to increase bucket threshold * fix checkstyle * address code review comments * add documentation for default buckets * fix typo --- .../client/RestHighLevelClient.java | 3 + docs/reference/aggregations/bucket.asciidoc | 2 + .../autodatehistogram-aggregation.asciidoc | 283 ++++ .../elasticsearch/search/SearchModule.java | 4 + .../bucket/BucketsAggregator.java | 13 + .../MergingBucketsDeferringCollector.java | 236 +++ .../AutoDateHistogramAggregationBuilder.java | 218 +++ .../AutoDateHistogramAggregator.java | 199 +++ .../AutoDateHistogramAggregatorFactory.java | 72 + .../histogram/DateHistogramAggregator.java | 4 +- .../histogram/InternalAutoDateHistogram.java | 601 ++++++++ .../histogram/InternalDateHistogram.java | 2 +- .../histogram/ParsedAutoDateHistogram.java | 91 ++ .../aggregations/AggregationsTests.java | 2 + .../bucket/AutoDateHistogramTests.java | 44 + .../AutoDateHistogramAggregatorTests.java | 1332 +++++++++++++++++ .../InternalAutoDateHistogramTests.java | 154 ++ .../aggregations/AggregatorTestCase.java | 3 +- .../test/InternalAggregationTestCase.java | 3 + ...nternalMultiBucketAggregationTestCase.java | 3 +- 20 files changed, 3263 insertions(+), 6 deletions(-) create mode 100644 docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc create mode 100644 
server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index df674ea898e..b9e41b87932 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -85,8 +85,10 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBu import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; import 
org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.ParsedAutoDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram; import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; @@ -1004,6 +1006,7 @@ public class RestHighLevelClient implements Closeable { map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c)); map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c)); map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)); + map.put(AutoDateHistogramAggregationBuilder.NAME, (p, c) -> ParsedAutoDateHistogram.fromXContent(p, (String) c)); map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); diff --git a/docs/reference/aggregations/bucket.asciidoc b/docs/reference/aggregations/bucket.asciidoc index 1233e0d9b73..ddb55e8d34c 100644 --- a/docs/reference/aggregations/bucket.asciidoc +++ b/docs/reference/aggregations/bucket.asciidoc @@ -19,6 +19,8 @@ the limit will fail with an exception. 
include::bucket/adjacency-matrix-aggregation.asciidoc[] +include::bucket/autodatehistogram-aggregation.asciidoc[] + include::bucket/children-aggregation.asciidoc[] include::bucket/composite-aggregation.asciidoc[] diff --git a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc new file mode 100644 index 00000000000..28cb65ce6cc --- /dev/null +++ b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc @@ -0,0 +1,283 @@ +[[search-aggregations-bucket-autodatehistogram-aggregation]] +=== Auto-interval Date Histogram Aggregation + +A multi-bucket aggregation similar to the <> except +instead of providing an interval to use as the width of each bucket, a target number of buckets is provided +indicating the number of buckets needed and the interval of the buckets is automatically chosen to best achieve +that target. The number of buckets returned will always be less than or equal to this target number. + +The buckets field is optional, and will default to 10 buckets if not specified. + +Requesting a target of 10 buckets. + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "auto_date_histogram" : { + "field" : "date", + "buckets" : 10 + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +==== Keys + +Internally, a date is represented as a 64 bit number representing a timestamp +in milliseconds-since-the-epoch. These timestamps are returned as the bucket +++key++s. The `key_as_string` is the same timestamp converted to a formatted +date string using the format specified with the `format` parameter: + +TIP: If no `format` is specified, then it will use the first date +<> specified in the field mapping. 
+ +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "auto_date_histogram" : { + "field" : "date", + "buckets" : 5, + "format" : "yyyy-MM-dd" <1> + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +<1> Supports expressive date <> + +Response: + +[source,js] +-------------------------------------------------- +{ + ... + "aggregations": { + "sales_over_time": { + "buckets": [ + { + "key_as_string": "2015-01-01", + "key": 1420070400000, + "doc_count": 3 + }, + { + "key_as_string": "2015-02-01", + "key": 1422748800000, + "doc_count": 2 + }, + { + "key_as_string": "2015-03-01", + "key": 1425168000000, + "doc_count": 2 + } + ] + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + +=== Intervals + +The interval of the returned buckets is selected based on the data collected by the +aggregation so that the number of buckets returned is less than or equal to the number +requested. The possible intervals returned are: + +[horizontal] +seconds:: In multiples of 1, 5, 10 and 30 +minutes:: In multiples of 1, 5, 10 and 30 +hours:: In multiples of 1, 3 and 12 +days:: In multiples of 1, and 7 +months:: In multiples of 1, and 3 +years:: In multiples of 1, 5, 10, 20, 50 and 100 + +In the worst case, where the number of daily buckets are too many for the requested +number of buckets, the number of buckets returned will be 1/7th of the number of +buckets requested. + +==== Time Zone + +Date-times are stored in Elasticsearch in UTC. By default, all bucketing and +rounding is also done in UTC. The `time_zone` parameter can be used to indicate +that bucketing should use a different time zone. + +Time zones may either be specified as an ISO 8601 UTC offset (e.g. 
`+01:00` or +`-08:00`) or as a timezone id, an identifier used in the TZ database like +`America/Los_Angeles`. + +Consider the following example: + +[source,js] +--------------------------------- +PUT my_index/log/1?refresh +{ + "date": "2015-10-01T00:30:00Z" +} + +PUT my_index/log/2?refresh +{ + "date": "2015-10-01T01:30:00Z" +} + +PUT my_index/log/3?refresh +{ + "date": "2015-10-01T02:30:00Z" +} + +GET my_index/_search?size=0 +{ + "aggs": { + "by_day": { + "auto_date_histogram": { + "field": "date", + "buckets" : 3 + } + } + } +} +--------------------------------- +// CONSOLE + +UTC is used if no time zone is specified, three 1-hour buckets are returned +starting at midnight UTC on 1 October 2015: + +[source,js] +--------------------------------- +{ + ... + "aggregations": { + "by_day": { + "buckets": [ + { + "key_as_string": "2015-10-01T00:00:00.000Z", + "key": 1443657600000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T01:00:00.000Z", + "key": 1443661200000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T02:00:00.000Z", + "key": 1443664800000, + "doc_count": 1 + } + ] + } + } +} +--------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + +If a `time_zone` of `-01:00` is specified, then midnight starts at one hour before +midnight UTC: + +[source,js] +--------------------------------- +GET my_index/_search?size=0 +{ + "aggs": { + "by_day": { + "auto_date_histogram": { + "field": "date", + "buckets" : 3, + "time_zone": "-01:00" + } + } + } +} +--------------------------------- +// CONSOLE +// TEST[continued] + + +Now three 1-hour buckets are still returned but the first bucket starts at +11:00pm on 30 September 2015 since that is the local time for the bucket in +the specified time zone. + +[source,js] +--------------------------------- +{ + ... 
+ "aggregations": { + "by_day": { + "buckets": [ + { + "key_as_string": "2015-09-30T23:00:00.000-01:00", + "key": 1443657600000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T00:00:00.000-01:00", + "key": 1443661200000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T01:00:00.000-01:00", + "key": 1443664800000, + "doc_count": 1 + } + ] + } + } +} +--------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + +<1> The `key_as_string` value represents midnight on each day + in the specified time zone. + +WARNING: When using time zones that follow DST (daylight savings time) changes, +buckets close to the moment when those changes happen can have slightly different +sizes than neighbouring buckets. +For example, consider a DST start in the `CET` time zone: on 27 March 2016 at 2am, +clocks were turned forward 1 hour to 3am local time. If the result of the aggregation +was daily buckets, the bucket covering that day will only hold data for 23 hours +instead of the usual 24 hours for other buckets. The same is true for shorter intervals +like e.g. 12h. Here, we will have only a 11h bucket on the morning of 27 March when the +DST shift happens. + +==== Scripts + +Like with the normal <>, both document level +scripts and value level scripts are supported. This aggregation does not however, support the `min_doc_count`, +`extended_bounds` and `order` parameters. + +==== Missing value + +The `missing` parameter defines how documents that are missing a value should be treated. +By default they will be ignored but it is also possible to treat them as if they +had a value. 
+ +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sale_date" : { + "auto_date_histogram" : { + "field" : "date", + "buckets": 10, + "missing": "2000/01/01" <1> + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +<1> Documents without a value in the `publish_date` field will fall into the same bucket as documents that have the value `2000-01-01`. + diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 199d2278bf7..efef1aeb04f 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -109,8 +109,10 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBu import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing; @@ -395,6 +397,8 @@ public class SearchModule { HistogramAggregationBuilder::parse).addResultReader(InternalHistogram::new)); registerAggregation(new AggregationSpec(DateHistogramAggregationBuilder.NAME, DateHistogramAggregationBuilder::new, 
DateHistogramAggregationBuilder::parse).addResultReader(InternalDateHistogram::new)); + registerAggregation(new AggregationSpec(AutoDateHistogramAggregationBuilder.NAME, AutoDateHistogramAggregationBuilder::new, + AutoDateHistogramAggregationBuilder::parse).addResultReader(InternalAutoDateHistogram::new)); registerAggregation(new AggregationSpec(GeoDistanceAggregationBuilder.NAME, GeoDistanceAggregationBuilder::new, GeoDistanceAggregationBuilder::parse).addResultReader(InternalGeoDistance::new)); registerAggregation(new AggregationSpec(GeoGridAggregationBuilder.NAME, GeoGridAggregationBuilder::new, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 504758e7a4e..7b09ac9d618 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -84,6 +84,19 @@ public abstract class BucketsAggregator extends AggregatorBase { subCollector.collect(doc, bucketOrd); } + public final void mergeBuckets(long[] mergeMap, long newNumBuckets) { + try (IntArray oldDocCounts = docCounts) { + docCounts = bigArrays.newIntArray(newNumBuckets, true); + docCounts.fill(0, newNumBuckets, 0); + for (int i = 0; i < oldDocCounts.size(); i++) { + int docCount = oldDocCounts.get(i); + if (docCount != 0) { + docCounts.increment(mergeMap[i], docCount); + } + } + } + } + public IntArray getDocCounts() { return docCounts; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java new file mode 100644 index 00000000000..f357e9d286f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java @@ -0,0 
+1,236 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.packed.PackedInts; +import org.apache.lucene.util.packed.PackedLongValues; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.BucketCollector; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A specialization of {@link DeferringBucketCollector} that collects all + * matches and then is able to replay a given subset of buckets. Exposes + * mergeBuckets, which can be invoked by the aggregator when increasing the + * rounding interval. 
/**
 * A specialization of {@link DeferringBucketCollector} that collects all
 * matches and then is able to replay a given subset of buckets. Exposes
 * {@link #mergeBuckets(long[])}, which can be invoked by the aggregator when
 * increasing the rounding interval.
 */
public class MergingBucketsDeferringCollector extends DeferringBucketCollector {

    // One Entry per finished leaf: the leaf context plus the (delta-encoded)
    // doc ids and their bucket ordinals collected in that leaf.
    // NOTE(review): generic type parameters appear to have been stripped from
    // this text (raw List here); presumably List<Entry> — confirm against VCS.
    List entries = new ArrayList<>();
    // The deferred sub-collectors, replayed in prepareSelectedBuckets.
    BucketCollector collector;
    final SearchContext searchContext;
    // State for the leaf currently being collected; null between leaves.
    LeafReaderContext context;
    PackedLongValues.Builder docDeltas;    // gaps between consecutive collected doc ids
    PackedLongValues.Builder buckets;      // bucket ordinal per collected doc
    long maxBucket = -1;                   // highest bucket ordinal seen so far
    boolean finished = false;              // set by postCollection(); gates replay
    LongHash selectedBuckets;              // non-null once replayed; maps bucket -> rebased ordinal

    public MergingBucketsDeferringCollector(SearchContext context) {
        this.searchContext = context;
    }

    @Override
    public void setDeferredCollector(Iterable deferredCollectors) {
        this.collector = BucketCollector.wrap(deferredCollectors);
    }

    @Override
    public boolean needsScores() {
        if (collector == null) {
            // setDeferredCollector must be called first (exception carries no message)
            throw new IllegalStateException();
        }
        return collector.needsScores();
    }

    @Override
    public void preCollection() throws IOException {
        collector.preCollection();
    }

    // Seals the current leaf (if any) into an Entry and resets per-leaf state.
    private void finishLeaf() {
        if (context != null) {
            entries.add(new Entry(context, docDeltas.build(), buckets.build()));
        }
        context = null;
        docDeltas = null;
        buckets = null;
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException {
        finishLeaf();

        context = ctx;
        docDeltas = PackedLongValues.packedBuilder(PackedInts.DEFAULT);
        buckets = PackedLongValues.packedBuilder(PackedInts.DEFAULT);

        return new LeafBucketCollector() {
            int lastDoc = 0;

            @Override
            public void collect(int doc, long bucket) {
                // store doc ids as deltas so PackedLongValues compresses well
                docDeltas.add(doc - lastDoc);
                buckets.add(bucket);
                lastDoc = doc;
                maxBucket = Math.max(maxBucket, bucket);
            }
        };
    }

    /**
     * Rewrites every recorded bucket ordinal through {@code mergeMap}
     * (old ordinal -> new ordinal), both for sealed leaves and for the
     * leaf currently being collected. Called when the owning aggregator
     * coarsens its rounding.
     */
    public void mergeBuckets(long[] mergeMap) {

        List newEntries = new ArrayList<>(entries.size());
        for (Entry sourceEntry : entries) {
            PackedLongValues.Builder newBuckets = PackedLongValues.packedBuilder(PackedInts.DEFAULT);
            for (PackedLongValues.Iterator itr = sourceEntry.buckets.iterator(); itr.hasNext();) {
                long bucket = itr.next();
                newBuckets.add(mergeMap[Math.toIntExact(bucket)]);
            }
            // docDeltas are unchanged — only the bucket ordinals are remapped
            newEntries.add(new Entry(sourceEntry.context, sourceEntry.docDeltas, newBuckets.build()));
        }
        entries = newEntries;

        // if there are buckets that have been collected in the current segment
        // we need to update the bucket ordinals there too
        if (buckets.size() > 0) {
            PackedLongValues currentBuckets = buckets.build();
            PackedLongValues.Builder newBuckets = PackedLongValues.packedBuilder(PackedInts.DEFAULT);
            for (PackedLongValues.Iterator itr = currentBuckets.iterator(); itr.hasNext();) {
                long bucket = itr.next();
                newBuckets.add(mergeMap[Math.toIntExact(bucket)]);
            }
            buckets = newBuckets;
        }
    }

    @Override
    public void postCollection() {
        finishLeaf();
        finished = true;
    }

    /**
     * Replay the wrapped collector, but only on a selection of buckets.
     * May be invoked at most once, and only after collection has finished.
     */
    @Override
    public void prepareSelectedBuckets(long... selectedBuckets) throws IOException {
        if (finished == false) {
            throw new IllegalStateException("Cannot replay yet, collection is not finished: postCollect() has not been called");
        }
        if (this.selectedBuckets != null) {
            throw new IllegalStateException("Already been replayed");
        }

        // Hash the selected buckets; hash.find(bucket) below doubles as both the
        // membership test and the rebased (dense) ordinal for replay.
        final LongHash hash = new LongHash(selectedBuckets.length, BigArrays.NON_RECYCLING_INSTANCE);
        for (long bucket : selectedBuckets) {
            hash.add(bucket);
        }
        this.selectedBuckets = hash;

        boolean needsScores = collector.needsScores();
        Weight weight = null;
        if (needsScores) {
            weight = searchContext.searcher().createNormalizedWeight(searchContext.query(), true);
        }
        for (Entry entry : entries) {
            final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context);
            DocIdSetIterator docIt = null;
            if (needsScores && entry.docDeltas.size() > 0) {
                Scorer scorer = weight.scorer(entry.context);
                // We don't need to check if the scorer is null
                // since we are sure that there are documents to replay
                // (entry.docDeltas is not empty).
                docIt = scorer.iterator();
                leafCollector.setScorer(scorer);
            }
            final PackedLongValues.Iterator docDeltaIterator = entry.docDeltas.iterator();
            final PackedLongValues.Iterator buckets = entry.buckets.iterator();
            int doc = 0;
            for (long i = 0, end = entry.docDeltas.size(); i < end; ++i) {
                doc += docDeltaIterator.next();       // undo the delta encoding
                final long bucket = buckets.next();
                final long rebasedBucket = hash.find(bucket);
                if (rebasedBucket != -1) {            // only replay selected buckets
                    if (needsScores) {
                        if (docIt.docID() < doc) {
                            docIt.advance(doc);
                        }
                        // aggregations should only be replayed on matching
                        // documents
                        assert docIt.docID() == doc;
                    }
                    leafCollector.collect(doc, rebasedBucket);
                }
            }
        }

        collector.postCollection();
    }

    /**
     * Wrap the provided aggregator so that it behaves (almost) as if it had
     * been collected directly.
     */
    @Override
    public Aggregator wrap(final Aggregator in) {

        return new WrappedAggregator(in) {

            @Override
            public InternalAggregation buildAggregation(long bucket) throws IOException {
                if (selectedBuckets == null) {
                    throw new IllegalStateException("Collection has not been replayed yet.");
                }
                // translate the caller's bucket ordinal into the dense replay ordinal
                final long rebasedBucket = selectedBuckets.find(bucket);
                if (rebasedBucket == -1) {
                    throw new IllegalStateException("Cannot build for a bucket which has not been collected [" + bucket + "]");
                }
                return in.buildAggregation(rebasedBucket);
            }

        };
    }

    // Immutable snapshot of everything collected within one leaf.
    private static class Entry {
        final LeafReaderContext context;
        final PackedLongValues docDeltas;
        final PackedLongValues buckets;

        Entry(LeafReaderContext context, PackedLongValues docDeltas, PackedLongValues buckets) {
            this.context = context;
            this.docDeltas = docDeltas;
            this.buckets = buckets;
        }
    }

}
/**
 * Builder for the {@code auto_date_histogram} aggregation: a date histogram
 * that picks its own rounding interval so that at most a target number of
 * buckets ({@link #NUM_BUCKETS_FIELD}, default 10) is produced.
 * NOTE(review): generic type parameters appear stripped from this text
 * (raw {@code ValuesSourceAggregationBuilder}, raw {@code ObjectParser});
 * confirm the parameterized forms against VCS.
 */
public class AutoDateHistogramAggregationBuilder
    extends ValuesSourceAggregationBuilder {

    public static final String NAME = "auto_date_histogram";

    public static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets");

    private static final ObjectParser PARSER;
    static {
        PARSER = new ObjectParser<>(AutoDateHistogramAggregationBuilder.NAME);
        ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true);

        PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD);
    }

    public static AutoDateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
        return PARSER.parse(parser, new AutoDateHistogramAggregationBuilder(aggregationName), null);
    }

    // Target maximum number of buckets; must stay > 0 (enforced in setNumBuckets).
    private int numBuckets = 10;

    /** Create a new builder with the given name. */
    public AutoDateHistogramAggregationBuilder(String name) {
        super(name, ValuesSourceType.NUMERIC, ValueType.DATE);
    }

    /** Read from a stream, for internal use only. */
    public AutoDateHistogramAggregationBuilder(StreamInput in) throws IOException {
        super(in, ValuesSourceType.NUMERIC, ValueType.DATE);
        numBuckets = in.readVInt();
    }

    /** Clone constructor used by {@link #shallowCopy}. */
    protected AutoDateHistogramAggregationBuilder(AutoDateHistogramAggregationBuilder clone, Builder factoriesBuilder,
            Map metaData) {
        super(clone, factoriesBuilder, metaData);
        this.numBuckets = clone.numBuckets;
    }

    @Override
    protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) {
        return new AutoDateHistogramAggregationBuilder(this, factoriesBuilder, metaData);
    }

    @Override
    protected void innerWriteTo(StreamOutput out) throws IOException {
        // must mirror the StreamInput constructor (single vInt)
        out.writeVInt(numBuckets);
    }

    @Override
    public String getType() {
        return NAME;
    }

    /**
     * Sets the target maximum bucket count.
     *
     * @throws IllegalArgumentException if {@code numBuckets <= 0}
     */
    public AutoDateHistogramAggregationBuilder setNumBuckets(int numBuckets) {
        if (numBuckets <= 0) {
            throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName() + " must be greater than 0 for [" + name + "]");
        }
        this.numBuckets = numBuckets;
        return this;
    }

    public int getNumBuckets() {
        return numBuckets;
    }

    @Override
    protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config,
            AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException {
        // Candidate roundings, coarsest last; each carries its rough duration in
        // millis plus the multiples of that unit the aggregator may use.
        RoundingInfo[] roundings = new RoundingInfo[6];
        roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE), 1000L, 1, 5, 10, 30);
        roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR), 60 * 1000L, 1, 5, 10, 30);
        roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY), 60 * 60 * 1000L, 1, 3, 12);
        roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH), 24 * 60 * 60 * 1000L, 1, 7);
        roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR), 30 * 24 * 60 * 60 * 1000L, 1, 3);
        roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY), 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);

        // Largest inner interval across the roundings, used to cap numBuckets so
        // that numBuckets * interval can never exceed the shard-level max_buckets.
        // NOTE(review): the stream's exclusive end is roundings.length-1, which
        // excludes the year rounding (intervals up to 100) — verify this is
        // intentional and not an off-by-one.
        int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1)
            .map(rounding -> rounding.innerIntervals)
            .flatMapToInt(Arrays::stream)
            .boxed()
            .reduce(Integer::max).get();
        Settings settings = context.getQueryShardContext().getIndexSettings().getNodeSettings();
        int maxBuckets = MultiBucketConsumerService.MAX_BUCKET_SETTING.get(settings);
        int bucketCeiling = maxBuckets / maxRoundingInterval;
        if (numBuckets > bucketCeiling) {
            throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName()+
                " must be less than " + bucketCeiling);
        }
        return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
    }

    // Builds a Rounding for the given unit, honouring the builder's time zone if set.
    private Rounding createRounding(DateTimeUnit interval) {
        Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
        if (timeZone() != null) {
            tzRoundingBuilder.timeZone(timeZone());
        }
        Rounding rounding = tzRoundingBuilder.build();
        return rounding;
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(NUM_BUCKETS_FIELD.getPreferredName(), numBuckets);
        return builder;
    }

    @Override
    protected int innerHashCode() {
        return Objects.hash(numBuckets);
    }

    @Override
    protected boolean innerEquals(Object obj) {
        AutoDateHistogramAggregationBuilder other = (AutoDateHistogramAggregationBuilder) obj;
        return Objects.equals(numBuckets, other.numBuckets);
    }

    /**
     * A candidate rounding: the rounding itself, a rough duration estimate of
     * one unit in milliseconds, and the ascending multiples ("inner intervals")
     * of that unit the aggregator may collapse buckets into.
     */
    public static class RoundingInfo implements Writeable {
        final Rounding rounding;
        final int[] innerIntervals;     // ascending; last entry is the maximum
        final long roughEstimateDurationMillis;

        public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, int... innerIntervals) {
            this.rounding = rounding;
            this.roughEstimateDurationMillis = roughEstimateDurationMillis;
            this.innerIntervals = innerIntervals;
        }

        /** Read from a stream; mirrors {@link #writeTo}. */
        public RoundingInfo(StreamInput in) throws IOException {
            rounding = Rounding.Streams.read(in);
            roughEstimateDurationMillis = in.readVLong();
            innerIntervals = in.readIntArray();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            Rounding.Streams.write(rounding, out);
            out.writeVLong(roughEstimateDurationMillis);
            out.writeIntArray(innerIntervals);
        }

        public int getMaximumInnerInterval() {
            return innerIntervals[innerIntervals.length - 1];
        }

        public long getRoughEstimateDurationMillis() {
            return roughEstimateDurationMillis;
        }

        @Override
        public int hashCode() {
            return Objects.hash(rounding, Arrays.hashCode(innerIntervals));
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (obj.getClass() != getClass()) {
                return false;
            }
            RoundingInfo other = (RoundingInfo) obj;
            return Objects.equals(rounding, other.rounding) &&
                Objects.deepEquals(innerIntervals, other.innerIntervals);
        }
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; +import org.elasticsearch.search.aggregations.bucket.DeferableBucketAggregator; +import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; +import org.elasticsearch.search.aggregations.bucket.MergingBucketsDeferringCollector; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * An 
/**
 * An aggregator for date values. Every date is rounded down using a configured
 * {@link Rounding}; when the number of buckets exceeds the target for the
 * current rounding's maximum inner interval, the aggregator moves to the next
 * (coarser) rounding and merges the buckets collected so far.
 *
 * @see Rounding
 */
class AutoDateHistogramAggregator extends DeferableBucketAggregator {

    private final ValuesSource.Numeric valuesSource;   // null for unmapped fields
    private final DocValueFormat formatter;
    private final RoundingInfo[] roundingInfos;        // candidate roundings, coarsest last
    private int roundingIdx = 0;                       // index of the rounding currently in use

    private LongHash bucketOrds;                       // rounded key -> dense bucket ordinal
    private int targetBuckets;                         // requested maximum bucket count
    private MergingBucketsDeferringCollector deferringCollector; // null until getDeferringCollector()

    AutoDateHistogramAggregator(String name, AggregatorFactories factories, int numBuckets, RoundingInfo[] roundingInfos,
            @Nullable ValuesSource.Numeric valuesSource, DocValueFormat formatter, SearchContext aggregationContext, Aggregator parent,
            List pipelineAggregators, Map metaData) throws IOException {

        super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
        this.targetBuckets = numBuckets;
        this.valuesSource = valuesSource;
        this.formatter = formatter;
        this.roundingInfos = roundingInfos;

        bucketOrds = new LongHash(1, aggregationContext.bigArrays());

    }

    @Override
    public boolean needsScores() {
        return (valuesSource != null && valuesSource.needsScores()) || super.needsScores();
    }

    @Override
    protected boolean shouldDefer(Aggregator aggregator) {
        // always defer: sub-aggregations must be replayed after the final
        // rounding is known, so every bucket ordinal is stable
        return true;
    }

    @Override
    public DeferringBucketCollector getDeferringCollector() {
        deferringCollector = new MergingBucketsDeferringCollector(context);
        return deferringCollector;
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final SortedNumericDocValues values = valuesSource.longValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                assert bucket == 0;  // this aggregator always runs in single-bucket mode
                if (values.advanceExact(doc)) {
                    final int valuesCount = values.docValueCount();

                    long previousRounded = Long.MIN_VALUE;
                    for (int i = 0; i < valuesCount; ++i) {
                        long value = values.nextValue();
                        long rounded = roundingInfos[roundingIdx].rounding.round(value);
                        assert rounded >= previousRounded;   // doc values are sorted ascending
                        if (rounded == previousRounded) {
                            continue;  // count each distinct rounded value once per doc
                        }
                        long bucketOrd = bucketOrds.add(rounded);
                        if (bucketOrd < 0) { // already seen
                            bucketOrd = -1 - bucketOrd;
                            collectExistingBucket(sub, doc, bucketOrd);
                        } else {
                            collectBucket(sub, doc, bucketOrd);
                            // too many buckets for the current rounding: coarsen
                            // (repeatedly, since one step may not be enough)
                            while (roundingIdx < roundingInfos.length - 1
                                    && bucketOrds.size() > (targetBuckets * roundingInfos[roundingIdx].getMaximumInnerInterval())) {
                                increaseRounding();
                            }
                        }
                        previousRounded = rounded;
                    }
                }
            }

            // Re-rounds every existing bucket key with the next coarser rounding
            // and remaps doc counts plus deferred collection state accordingly.
            private void increaseRounding() {
                try (LongHash oldBucketOrds = bucketOrds) {
                    LongHash newBucketOrds = new LongHash(1, context.bigArrays());
                    long[] mergeMap = new long[(int) oldBucketOrds.size()];
                    Rounding newRounding = roundingInfos[++roundingIdx].rounding;
                    for (int i = 0; i < oldBucketOrds.size(); i++) {
                        long oldKey = oldBucketOrds.get(i);
                        long newKey = newRounding.round(oldKey);
                        long newBucketOrd = newBucketOrds.add(newKey);
                        if (newBucketOrd >= 0) {
                            mergeMap[i] = newBucketOrd;          // first time this new key is seen
                        } else {
                            mergeMap[i] = -1 - newBucketOrd;     // key already present: decode its ordinal
                        }
                    }
                    mergeBuckets(mergeMap, newBucketOrds.size());
                    if (deferringCollector != null) {
                        deferringCollector.mergeBuckets(mergeMap);
                    }
                    bucketOrds = newBucketOrds;
                }
            }
        };
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
        assert owningBucketOrdinal == 0;
        consumeBucketsAndMaybeBreak((int) bucketOrds.size());

        // replay deferred sub-aggregations on every surviving bucket ordinal
        long[] bucketOrdArray = new long[(int) bucketOrds.size()];
        for (int i = 0; i < bucketOrds.size(); i++) {
            bucketOrdArray[i] = i;
        }

        runDeferredCollections(bucketOrdArray);

        List buckets = new ArrayList<>((int) bucketOrds.size());
        for (long i = 0; i < bucketOrds.size(); i++) {
            buckets.add(new InternalAutoDateHistogram.Bucket(bucketOrds.get(i), bucketDocCount(i), formatter, bucketAggregations(i)));
        }

        // the contract of the histogram aggregation is that shards must return
        // buckets ordered by key in ascending order
        CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator(this));

        // value source will be null for unmapped fields
        InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
                buildEmptySubAggregations());

        return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo, formatter, pipelineAggregators(), metaData());
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
                buildEmptySubAggregations());
        return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, emptyBucketInfo, formatter,
                pipelineAggregators(), metaData());
    }

    @Override
    public void doClose() {
        Releasables.close(bucketOrds);
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public final class AutoDateHistogramAggregatorFactory + extends ValuesSourceAggregatorFactory { + + private final int numBuckets; + private RoundingInfo[] roundingInfos; + + public AutoDateHistogramAggregatorFactory(String name, ValuesSourceConfig config, int numBuckets, RoundingInfo[] roundingInfos, + SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { + super(name, config, context, parent, subFactoriesBuilder, metaData); + this.numBuckets = numBuckets; + this.roundingInfos = roundingInfos; + } + + @Override + protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, 
Aggregator parent, boolean collectsFromSingleBucket, + List pipelineAggregators, Map metaData) throws IOException { + if (collectsFromSingleBucket == false) { + return asMultiBucketAggregator(this, context, parent); + } + return createAggregator(valuesSource, parent, pipelineAggregators, metaData); + } + + private Aggregator createAggregator(ValuesSource.Numeric valuesSource, Aggregator parent, List pipelineAggregators, + Map metaData) throws IOException { + return new AutoDateHistogramAggregator(name, factories, numBuckets, roundingInfos, valuesSource, config.format(), context, parent, + pipelineAggregators, + metaData); + } + + @Override + protected Aggregator createUnmapped(Aggregator parent, List pipelineAggregators, Map metaData) + throws IOException { + return createAggregator(null, parent, pipelineAggregators, metaData); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index e0b64d2cd5b..8b1f0c46421 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -28,13 +28,13 @@ import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java new file mode 100644 index 00000000000..27c195cbdae --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -0,0 +1,601 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Objects; + +/** + * Implementation of {@link Histogram}. 
+ */ +public final class InternalAutoDateHistogram extends + InternalMultiBucketAggregation implements Histogram, HistogramFactory { + + public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { + + final long key; + final long docCount; + final InternalAggregations aggregations; + protected final transient DocValueFormat format; + + public Bucket(long key, long docCount, DocValueFormat format, + InternalAggregations aggregations) { + this.format = format; + this.key = key; + this.docCount = docCount; + this.aggregations = aggregations; + } + + /** + * Read from a stream. + */ + public Bucket(StreamInput in, DocValueFormat format) throws IOException { + this.format = format; + key = in.readLong(); + docCount = in.readVLong(); + aggregations = InternalAggregations.readAggregations(in); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != InternalAutoDateHistogram.Bucket.class) { + return false; + } + InternalAutoDateHistogram.Bucket that = (InternalAutoDateHistogram.Bucket) obj; + // No need to take the keyed and format parameters into account, + // they are already stored and tested on the InternalDateHistogram object + return key == that.key + && docCount == that.docCount + && Objects.equals(aggregations, that.aggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), key, docCount, aggregations); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(key); + out.writeVLong(docCount); + aggregations.writeTo(out); + } + + @Override + public String getKeyAsString() { + return format.format(key).toString(); + } + + @Override + public Object getKey() { + return new DateTime(key, DateTimeZone.UTC); + } + + @Override + public long getDocCount() { + return docCount; + } + + @Override + public Aggregations getAggregations() { + return aggregations; + } + + Bucket reduce(List buckets, 
Rounding rounding, ReduceContext context) { + List aggregations = new ArrayList<>(buckets.size()); + long docCount = 0; + for (Bucket bucket : buckets) { + docCount += bucket.docCount; + aggregations.add((InternalAggregations) bucket.getAggregations()); + } + InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + return new InternalAutoDateHistogram.Bucket(rounding.round(key), docCount, format, aggs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + String keyAsString = format.format(key).toString(); + builder.startObject(); + if (format != DocValueFormat.RAW) { + builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), keyAsString); + } + builder.field(CommonFields.KEY.getPreferredName(), key); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); + aggregations.toXContentInternal(builder, params); + builder.endObject(); + return builder; + } + + @Override + public int compareKey(Bucket other) { + return Long.compare(key, other.key); + } + + public DocValueFormat getFormatter() { + return format; + } + } + + static class BucketInfo { + + final RoundingInfo[] roundingInfos; + final int roundingIdx; + final InternalAggregations emptySubAggregations; + + BucketInfo(RoundingInfo[] roundings, int roundingIdx, InternalAggregations subAggregations) { + this.roundingInfos = roundings; + this.roundingIdx = roundingIdx; + this.emptySubAggregations = subAggregations; + } + + BucketInfo(StreamInput in) throws IOException { + int size = in.readVInt(); + roundingInfos = new RoundingInfo[size]; + for (int i = 0; i < size; i++) { + roundingInfos[i] = new RoundingInfo(in); + } + roundingIdx = in.readVInt(); + emptySubAggregations = InternalAggregations.readAggregations(in); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeVInt(roundingInfos.length); + for (RoundingInfo roundingInfo : roundingInfos) { + roundingInfo.writeTo(out); + } + 
out.writeVInt(roundingIdx); + emptySubAggregations.writeTo(out); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + BucketInfo that = (BucketInfo) obj; + return Objects.deepEquals(roundingInfos, that.roundingInfos) + && Objects.equals(roundingIdx, that.roundingIdx) + && Objects.equals(emptySubAggregations, that.emptySubAggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), Arrays.hashCode(roundingInfos), roundingIdx, emptySubAggregations); + } + } + + private final List buckets; + private final DocValueFormat format; + private final BucketInfo bucketInfo; + private final int targetBuckets; + + + InternalAutoDateHistogram(String name, List buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter, + List pipelineAggregators, Map metaData) { + super(name, pipelineAggregators, metaData); + this.buckets = buckets; + this.bucketInfo = emptyBucketInfo; + this.format = formatter; + this.targetBuckets = targetBuckets; + } + + /** + * Read from a stream. 
+ */ + public InternalAutoDateHistogram(StreamInput in) throws IOException { + super(in); + bucketInfo = new BucketInfo(in); + format = in.readNamedWriteable(DocValueFormat.class); + buckets = in.readList(stream -> new Bucket(stream, format)); + this.targetBuckets = in.readVInt(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + bucketInfo.writeTo(out); + out.writeNamedWriteable(format); + out.writeList(buckets); + out.writeVInt(targetBuckets); + } + + @Override + public String getWriteableName() { + return AutoDateHistogramAggregationBuilder.NAME; + } + + @Override + public List getBuckets() { + return Collections.unmodifiableList(buckets); + } + + DocValueFormat getFormatter() { + return format; + } + + public int getTargetBuckets() { + return targetBuckets; + } + + public BucketInfo getBucketInfo() { + return bucketInfo; + } + + @Override + public InternalAutoDateHistogram create(List buckets) { + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData); + } + + @Override + public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); + } + + private static class IteratorAndCurrent { + + private final Iterator iterator; + private Bucket current; + + IteratorAndCurrent(Iterator iterator) { + this.iterator = iterator; + current = iterator.next(); + } + + } + + /** + * This method works almost exactly the same as + * InternalDateHistogram#reduceBuckets(List, ReduceContext), the different + * here is that we need to round all the keys we see using the highest level + * rounding returned across all the shards so the resolution of the buckets + * is the same and they can be reduced together. 
+ */ + private BucketReduceResult reduceBuckets(List aggregations, ReduceContext reduceContext) { + + // First we need to find the highest level rounding used across all the + // shards + int reduceRoundingIdx = 0; + for (InternalAggregation aggregation : aggregations) { + int aggRoundingIdx = ((InternalAutoDateHistogram) aggregation).bucketInfo.roundingIdx; + if (aggRoundingIdx > reduceRoundingIdx) { + reduceRoundingIdx = aggRoundingIdx; + } + } + // This rounding will be used to reduce all the buckets + RoundingInfo reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx]; + Rounding reduceRounding = reduceRoundingInfo.rounding; + + final PriorityQueue pq = new PriorityQueue(aggregations.size()) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current.key < b.current.key; + } + }; + for (InternalAggregation aggregation : aggregations) { + InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent(histogram.buckets.iterator())); + } + } + + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = reduceRounding.round(pq.top().current.key); + + do { + final IteratorAndCurrent top = pq.top(); + + if (reduceRounding.round(top.current.key) != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceRounding, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + currentBuckets.clear(); + key = reduceRounding.round(top.current.key); + } + + currentBuckets.add(top.current); + + if (top.iterator.hasNext()) { + final Bucket next = top.iterator.next(); + assert next.key > top.current.key : "shards must return data sorted by 
key"; + top.current = next; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceRounding, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + } + } + + return mergeBucketsIfNeeded(reducedBuckets, reduceRoundingIdx, reduceRoundingInfo, reduceContext); + } + + private BucketReduceResult mergeBucketsIfNeeded(List reducedBuckets, int reduceRoundingIdx, RoundingInfo reduceRoundingInfo, + ReduceContext reduceContext) { + while (reducedBuckets.size() > (targetBuckets * reduceRoundingInfo.getMaximumInnerInterval()) + && reduceRoundingIdx < bucketInfo.roundingInfos.length - 1) { + reduceRoundingIdx++; + reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx]; + reducedBuckets = mergeBuckets(reducedBuckets, reduceRoundingInfo.rounding, reduceContext); + } + return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx); + } + + private List mergeBuckets(List reducedBuckets, Rounding reduceRounding, ReduceContext reduceContext) { + List mergedBuckets = new ArrayList<>(); + + List sameKeyedBuckets = new ArrayList<>(); + double key = Double.NaN; + for (Bucket bucket : reducedBuckets) { + long roundedBucketKey = reduceRounding.round(bucket.key); + if (Double.isNaN(key)) { + key = roundedBucketKey; + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } else if (roundedBucketKey == key) { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } else { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, reduceRounding, reduceContext)); + sameKeyedBuckets.clear(); + key = roundedBucketKey; + 
reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } + } + if (sameKeyedBuckets.isEmpty() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, reduceRounding, reduceContext)); + } + reducedBuckets = mergedBuckets; + return reducedBuckets; + } + + private static class BucketReduceResult { + List buckets; + RoundingInfo roundingInfo; + int roundingIdx; + + BucketReduceResult(List buckets, RoundingInfo roundingInfo, int roundingIdx) { + this.buckets = buckets; + this.roundingInfo = roundingInfo; + this.roundingIdx = roundingIdx; + + } + } + + private BucketReduceResult addEmptyBuckets(BucketReduceResult currentResult, ReduceContext reduceContext) { + List list = currentResult.buckets; + if (list.isEmpty()) { + return currentResult; + } + int roundingIdx = getAppropriateRounding(list.get(0).key, list.get(list.size() - 1).key, currentResult.roundingIdx, + bucketInfo.roundingInfos); + RoundingInfo roundingInfo = bucketInfo.roundingInfos[roundingIdx]; + Rounding rounding = roundingInfo.rounding; + // merge buckets using the new rounding + list = mergeBuckets(list, rounding, reduceContext); + + Bucket lastBucket = null; + ListIterator iter = list.listIterator(); + InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(Collections.singletonList(bucketInfo.emptySubAggregations), + reduceContext); + + // Add the empty buckets within the data, + // e.g. 
if the data series is [1,2,3,7] there're 3 empty buckets that will be created for 4,5,6 + while (iter.hasNext()) { + Bucket nextBucket = list.get(iter.nextIndex()); + if (lastBucket != null) { + long key = rounding.nextRoundingValue(lastBucket.key); + while (key < nextBucket.key) { + reduceContext.consumeBucketsAndMaybeBreak(1); + iter.add(new InternalAutoDateHistogram.Bucket(key, 0, format, reducedEmptySubAggs)); + key = rounding.nextRoundingValue(key); + } + assert key == nextBucket.key : "key: " + key + ", nextBucket.key: " + nextBucket.key; + } + lastBucket = iter.next(); + } + return new BucketReduceResult(list, roundingInfo, roundingIdx); + } + + private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] roundings) { + if (roundingIdx == roundings.length - 1) { + return roundingIdx; + } + int currentRoundingIdx = roundingIdx; + + // Getting the accurate number of required buckets can be slow for large + // ranges at low roundings so get a rough estimate of the rounding first + // so we are at most 1 away from the correct rounding and then get the + // accurate rounding value + for (int i = currentRoundingIdx + 1; i < roundings.length; i++) { + long dataDuration = maxKey - minKey; + long roughEstimateRequiredBuckets = dataDuration / roundings[i].getRoughEstimateDurationMillis(); + if (roughEstimateRequiredBuckets < targetBuckets * roundings[i].getMaximumInnerInterval()) { + currentRoundingIdx = i - 1; + break; + } else if (i == roundingIdx - 1) { + currentRoundingIdx = i; + break; + } + } + + int requiredBuckets = 0; + do { + Rounding currentRounding = roundings[currentRoundingIdx].rounding; + long currentKey = minKey; + requiredBuckets = 0; + while (currentKey < maxKey) { + requiredBuckets++; + currentKey = currentRounding.nextRoundingValue(currentKey); + } + currentRoundingIdx++; + } while (requiredBuckets > (targetBuckets * roundings[roundingIdx].getMaximumInnerInterval()) + && currentRoundingIdx < roundings.length); + // 
The loop will increase past the correct rounding index here so we + // need to subtract one to get the rounding index we need + return currentRoundingIdx - 1; + } + + @Override + public InternalAggregation doReduce(List aggregations, ReduceContext reduceContext) { + BucketReduceResult reducedBucketsResult = reduceBuckets(aggregations, reduceContext); + + if (reduceContext.isFinalReduce()) { + // adding empty buckets if needed + reducedBucketsResult = addEmptyBuckets(reducedBucketsResult, reduceContext); + + // Adding empty buckets may have tipped us over the target so merge the buckets again if needed + reducedBucketsResult = mergeBucketsIfNeeded(reducedBucketsResult.buckets, reducedBucketsResult.roundingIdx, + reducedBucketsResult.roundingInfo, reduceContext); + + // Now finally see if we need to merge consecutive buckets together to make a coarser interval at the same rounding + reducedBucketsResult = maybeMergeConsecutiveBuckets(reducedBucketsResult, reduceContext); + } + + BucketInfo bucketInfo = new BucketInfo(this.bucketInfo.roundingInfos, reducedBucketsResult.roundingIdx, + this.bucketInfo.emptySubAggregations); + + return new InternalAutoDateHistogram(getName(), reducedBucketsResult.buckets, targetBuckets, bucketInfo, format, + pipelineAggregators(), getMetaData()); + } + + private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, ReduceContext reduceContext) { + List buckets = reducedBucketsResult.buckets; + RoundingInfo roundingInfo = reducedBucketsResult.roundingInfo; + int roundingIdx = reducedBucketsResult.roundingIdx; + if (buckets.size() > targetBuckets) { + for (int interval : roundingInfo.innerIntervals) { + int resultingBuckets = buckets.size() / interval; + if (resultingBuckets <= targetBuckets) { + return mergeConsecutiveBuckets(buckets, interval, roundingIdx, roundingInfo, reduceContext); + } + } + } + return reducedBucketsResult; + } + + private BucketReduceResult mergeConsecutiveBuckets(List 
reducedBuckets, int mergeInterval, int roundingIdx, + RoundingInfo roundingInfo, ReduceContext reduceContext) { + List mergedBuckets = new ArrayList<>(); + List sameKeyedBuckets = new ArrayList<>(); + + double key = roundingInfo.rounding.round(reducedBuckets.get(0).key); + for (int i = 0; i < reducedBuckets.size(); i++) { + Bucket bucket = reducedBuckets.get(i); + if (i % mergeInterval == 0 && sameKeyedBuckets.isEmpty() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext)); + sameKeyedBuckets.clear(); + key = roundingInfo.rounding.round(bucket.key); + } + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } + if (sameKeyedBuckets.isEmpty() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext)); + } + return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx); + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.startArray(CommonFields.BUCKETS.getPreferredName()); + for (Bucket bucket : buckets) { + bucket.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + // HistogramFactory method impls + + @Override + public Number getKey(MultiBucketsAggregation.Bucket bucket) { + return ((Bucket) bucket).key; + } + + @Override + public Number nextKey(Number key) { + return bucketInfo.roundingInfos[bucketInfo.roundingIdx].rounding.nextRoundingValue(key.longValue()); + } + + @Override + public InternalAggregation createAggregation(List buckets) { + // convert buckets to the right type + List buckets2 = new ArrayList<>(buckets.size()); + for (Object b : buckets) { + buckets2.add((Bucket) b); + } + buckets2 = 
Collections.unmodifiableList(buckets2); + return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, pipelineAggregators(), getMetaData()); + } + + @Override + public Bucket createBucket(Number key, long docCount, InternalAggregations aggregations) { + return new Bucket(key.longValue(), docCount, format, aggregations); + } + + @Override + protected boolean doEquals(Object obj) { + InternalAutoDateHistogram that = (InternalAutoDateHistogram) obj; + return Objects.equals(buckets, that.buckets) + && Objects.equals(format, that.format) + && Objects.equals(bucketInfo, that.bucketInfo); + } + + @Override + protected int doHashCode() { + return Objects.hash(buckets, format, bucketInfo); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 84dec2c983e..669bda5574d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -424,7 +424,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); key = nextKey(key).longValue(); } - assert key == nextBucket.key; + assert key == nextBucket.key : "key: " + key + ", nextBucket.key: " + nextBucket.key; } lastBucket = iter.next(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java new file mode 100644 index 00000000000..caca44f9f2e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java @@ -0,0 +1,91 @@ +/* + 
* Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.List; + +public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation implements Histogram { + + @Override + public String getType() { + return AutoDateHistogramAggregationBuilder.NAME; + } + + @Override + public List getBuckets() { + return buckets; + } + + private static ObjectParser PARSER = + new ObjectParser<>(ParsedAutoDateHistogram.class.getSimpleName(), true, ParsedAutoDateHistogram::new); + static { + declareMultiBucketAggregationFields(PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true)); + } + + public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException { + ParsedAutoDateHistogram aggregation = PARSER.parse(parser, null); + 
aggregation.setName(name); + return aggregation; + } + + public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket { + + private Long key; + + @Override + public Object getKey() { + if (key != null) { + return new DateTime(key, DateTimeZone.UTC); + } + return null; + } + + @Override + public String getKeyAsString() { + String keyAsString = super.getKeyAsString(); + if (keyAsString != null) { + return keyAsString; + } + if (key != null) { + return Long.toString(key); + } + return null; + } + + @Override + protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { + return builder.field(CommonFields.KEY.getPreferredName(), key); + } + + static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException { + return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue()); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 79984f58949..fcafce3936e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilterTests; import org.elasticsearch.search.aggregations.bucket.filter.InternalFiltersTests; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGridTests; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobalTests; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogramTests; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogramTests; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogramTests; import 
org.elasticsearch.search.aggregations.bucket.missing.InternalMissingTests; @@ -125,6 +126,7 @@ public class AggregationsTests extends ESTestCase { aggsTests.add(new InternalGeoCentroidTests()); aggsTests.add(new InternalHistogramTests()); aggsTests.add(new InternalDateHistogramTests()); + aggsTests.add(new InternalAutoDateHistogramTests()); aggsTests.add(new LongTermsTests()); aggsTests.add(new DoubleTermsTests()); aggsTests.add(new StringTermsTests()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java new file mode 100644 index 00000000000..3a10edf1833 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.search.aggregations.BaseAggregationTestCase; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; + +public class AutoDateHistogramTests extends BaseAggregationTestCase { + + @Override + protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() { + AutoDateHistogramAggregationBuilder builder = new AutoDateHistogramAggregationBuilder(randomAlphaOfLengthBetween(1, 10)); + builder.field(INT_FIELD_NAME); + builder.setNumBuckets(randomIntBetween(1, 100000)); + if (randomBoolean()) { + builder.format("###.##"); + } + if (randomBoolean()) { + builder.missing(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + builder.timeZone(randomDateTimeZone()); + } + return builder; + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java new file mode 100644 index 00000000000..7cf29e3aa9c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -0,0 +1,1332 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; +import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.hamcrest.Matchers; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.chrono.ISOChronology; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.containsString; + +public class AutoDateHistogramAggregatorTests extends AggregatorTestCase { + + private static final String DATE_FIELD = "date"; + private static final String INSTANT_FIELD = "instant"; + + private static final List dataset = Arrays.asList( + "2010-03-12T01:07:45", + "2010-04-27T03:43:34", 
+ "2012-05-18T04:11:00", + "2013-05-29T05:11:31", + "2013-10-31T08:24:05", + "2015-02-13T13:09:32", + "2015-06-24T13:47:43", + "2015-11-13T16:14:34", + "2016-03-04T17:09:50", + "2017-12-12T22:55:46"); + + public void testMatchNoDocs() throws IOException { + testBothCases(new MatchNoDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + } + + public void testMatchAllDocs() throws IOException { + Query query = new MatchAllDocsQuery(); + + testSearchCase(query, dataset, + aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), + histogram -> assertEquals(10, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dataset, + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), + histogram -> assertEquals(8, histogram.getBuckets().size()) + ); + } + + public void testSubAggregations() throws IOException { + Query query = new MatchAllDocsQuery(); + testSearchAndReduceCase(query, dataset, + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD) + .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(8, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2010-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + Stats stats = bucket.getAggregations().get("stats"); + assertEquals("2010-03-12T01:07:45.000Z", stats.getMinAsString()); + assertEquals("2010-04-27T03:43:34.000Z", stats.getMaxAsString()); + assertEquals(2L, stats.getCount()); + + bucket = buckets.get(1); + assertEquals("2011-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertTrue(Double.isInfinite(stats.getMin())); + assertTrue(Double.isInfinite(stats.getMax())); + assertEquals(0L, stats.getCount()); + + bucket = buckets.get(2); + 
assertEquals("2012-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2012-05-18T04:11:00.000Z", stats.getMinAsString()); + assertEquals("2012-05-18T04:11:00.000Z", stats.getMaxAsString()); + assertEquals(1L, stats.getCount()); + + bucket = buckets.get(3); + assertEquals("2013-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2013-05-29T05:11:31.000Z", stats.getMinAsString()); + assertEquals("2013-10-31T08:24:05.000Z", stats.getMaxAsString()); + assertEquals(2L, stats.getCount()); + + bucket = buckets.get(4); + assertEquals("2014-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertTrue(Double.isInfinite(stats.getMin())); + assertTrue(Double.isInfinite(stats.getMax())); + assertEquals(0L, stats.getCount()); + + bucket = buckets.get(5); + assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2015-02-13T13:09:32.000Z", stats.getMinAsString()); + assertEquals("2015-11-13T16:14:34.000Z", stats.getMaxAsString()); + assertEquals(3L, stats.getCount()); + + bucket = buckets.get(6); + assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2016-03-04T17:09:50.000Z", stats.getMinAsString()); + assertEquals("2016-03-04T17:09:50.000Z", stats.getMaxAsString()); + assertEquals(1L, stats.getCount()); + + bucket = buckets.get(7); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2017-12-12T22:55:46.000Z", stats.getMinAsString()); + 
assertEquals("2017-12-12T22:55:46.000Z", stats.getMaxAsString()); + assertEquals(1L, stats.getCount()); + }); + } + + public void testNoDocs() throws IOException { + Query query = new MatchNoDocsQuery(); + List dates = Collections.emptyList(); + Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD); + + testSearchCase(query, dates, aggregation, + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dates, aggregation, + histogram -> assertNull(histogram) + ); + } + + public void testAggregateWrongField() throws IOException { + testBothCases(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(10).field("wrong_field"), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + } + + public void testIntervalYear() throws IOException { + testSearchCase(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2015-02-13T13:09:32.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2015-06-24T13:47:43.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2015-11-13T16:14:34.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2016-03-04T17:09:50.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-12-12T22:55:46.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, + aggregation -> 
aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + } + + public void testIntervalMonth() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(6, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-03-04T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-03-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-03-06T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), + aggregation -> 
aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testWithLargeNumberOfBuckets() { + Query query = new MatchAllDocsQuery(); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> testSearchCase(query, dataset, + aggregation -> aggregation.setNumBuckets(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS+1).field(DATE_FIELD), + // since an exception is thrown, this assertion won't be invoked. + histogram -> assertTrue(false) + )); + assertThat(exception.getMessage(), containsString("must be less than")); + } + + public void testIntervalDay() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); 
+ assertEquals(1, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-04T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + } + + public void testIntervalDayWithTZ() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-31T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-02T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = 
buckets.get(3); + assertEquals("2017-02-04T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-31T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-02T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-03T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-04T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + } + + public void testIntervalHour() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = 
buckets.get(1); + assertEquals("2017-02-01T09:35:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T10:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T13:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T14:04:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T14:05:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T15:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T16:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T16:48:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T16:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(8, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + 
assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T11:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T12:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testIntervalHourWithTZ() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T08:02:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T08:35:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:15:00.000-01:00", bucket.getKeyAsString()); + 
assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T12:06:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T13:04:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T13:05:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T14:59:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T15:06:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T15:48:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T15:59:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(8, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T08:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + 
assertEquals("2017-02-01T10:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T11:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T12:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T13:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T14:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T15:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testAllSecondIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusSeconds(i); + dataset.add(format.print(date)); + } + + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(120, buckets.size()); + for (int i = 0; i < 120; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i * 
5), bucket.getKey()); + assertEquals(5, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(60, buckets.size()); + for (int i = 0; i < 60; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i * 10), bucket.getKey()); + assertEquals(10, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(50).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(20, buckets.size()); + for (int i = 0; i < 20; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i * 30), bucket.getKey()); + assertEquals(30, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + for (int i = 0; i < 10; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i), bucket.getKey()); + assertEquals(60, bucket.getDocCount()); + } + }); + } + + public void testAllMinuteIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusMinutes(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = 
buckets.get(i); + assertEquals(startDate.plusMinutes(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(120, buckets.size()); + for (int i = 0; i < 120; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i * 5), bucket.getKey()); + assertEquals(5, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(60, buckets.size()); + for (int i = 0; i < 60; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i * 10), bucket.getKey()); + assertEquals(10, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(50).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(20, buckets.size()); + for (int i = 0; i < 20; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i * 30), bucket.getKey()); + assertEquals(30, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + for (int i = 0; i < 10; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i), bucket.getKey()); + assertEquals(60, bucket.getDocCount()); + } + }); + } + + public void testAllHourIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new 
DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusHours(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(200, buckets.size()); + for (int i = 0; i < 200; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i * 3), bucket.getKey()); + assertEquals(3, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(50, buckets.size()); + for (int i = 0; i < 50; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i * 12), bucket.getKey()); + assertEquals(12, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(30).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(25, buckets.size()); + for (int i = 0; i < 25; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusDays(i), bucket.getKey()); + assertEquals(24, bucket.getDocCount()); + } + }); + } + + public void testAllDayIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new 
ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 700; i++) { + DateTime date = startDate.plusDays(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(700).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(700, buckets.size()); + for (int i = 0; i < 700; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusDays(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(100, buckets.size()); + for (int i = 0; i < 100; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusDays(i * 7), bucket.getKey()); + assertEquals(7, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(30).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(24, buckets.size()); + for (int i = 0; i < 24; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMonths(i), bucket.getKey()); + assertThat(bucket.getDocCount(), Matchers.lessThanOrEqualTo(31L)); + } + }); + } + + public void testAllMonthIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusMonths(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> 
aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMonths(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(200, buckets.size()); + for (int i = 0; i < 200; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMonths(i * 3), bucket.getKey()); + assertEquals(3, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(50, buckets.size()); + for (int i = 0; i < 50; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i), bucket.getKey()); + assertEquals(12, bucket.getDocCount()); + } + }); + } + + public void testAllYearIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusYears(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new 
MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(120, buckets.size()); + for (int i = 0; i < 120; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 5), bucket.getKey()); + assertEquals(5, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(60, buckets.size()); + for (int i = 0; i < 60; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 10), bucket.getKey()); + assertEquals(10, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(50).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(30, buckets.size()); + for (int i = 0; i < 30; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 20), bucket.getKey()); + assertEquals(20, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(20).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(12, buckets.size()); + for (int i = 0; i < 12; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 50), bucket.getKey()); + assertEquals(50, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(6, buckets.size()); + for (int i = 0; i < 6; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 100), bucket.getKey()); + assertEquals(100, 
bucket.getDocCount()); + } + }); + } + + public void testInterval3Hour() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:35:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T10:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T13:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T14:04:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T14:05:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T15:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T16:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T16:48:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T16:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, 
bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T12:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); + assertEquals(4, bucket.getDocCount()); + } + ); + } + + public void testIntervalMinute() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:35.000Z", + "2017-02-01T09:02:59.000Z", + "2017-02-01T09:15:37.000Z", + "2017-02-01T09:16:04.000Z", + "2017-02-01T09:16:42.000Z" + ), + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:35.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:02:59.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:15:37.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T09:16:04.000Z", bucket.getKeyAsString()); + assertEquals(1, 
bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T09:16:42.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:35.000Z", + "2017-02-01T09:02:59.000Z", + "2017-02-01T09:15:37.000Z", + "2017-02-01T09:16:04.000Z", + "2017-02-01T09:16:42.000Z" + ), + aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(15, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:03:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:04:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T09:05:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T09:06:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T09:07:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T09:08:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T09:09:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T09:10:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T09:11:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(10); + 
assertEquals("2017-02-01T09:12:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(11); + assertEquals("2017-02-01T09:13:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(12); + assertEquals("2017-02-01T09:14:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(13); + assertEquals("2017-02-01T09:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(14); + assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + ); + } + + public void testIntervalSecond() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01T00:00:05.015Z", "2017-02-01T00:00:07.299Z", "2017-02-01T00:00:07.074Z", + "2017-02-01T00:00:11.688Z", "2017-02-01T00:00:11.210Z", "2017-02-01T00:00:11.380Z"), + aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:07.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T00:00:05.015Z", + "2017-02-01T00:00:07.299Z", + "2017-02-01T00:00:07.074Z", + "2017-02-01T00:00:11.688Z", + "2017-02-01T00:00:11.210Z", + "2017-02-01T00:00:11.380Z" + ), + aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(7, buckets.size()); + + Histogram.Bucket 
bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:06.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:07.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T00:00:08.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T00:00:09.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + private void testSearchCase(Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + executeTestCase(false, query, dataset, configure, verify); + } + + private void testSearchAndReduceCase(Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + executeTestCase(true, query, dataset, configure, verify); + } + + private void testBothCases(Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + testSearchCase(query, dataset, configure, verify); + testSearchAndReduceCase(query, dataset, configure, verify); + } + + @Override + protected IndexSettings createIndexSettings() { + Settings nodeSettings = Settings.builder() + .put("search.max_buckets", 100000).build(); + return new IndexSettings( + IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + 
.build(), + nodeSettings + ); + } + + private void executeTestCase(boolean reduced, Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + for (String date : dataset) { + if (frequently()) { + indexWriter.commit(); + } + + long instant = asLong(date); + document.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); + document.add(new LongPoint(INSTANT_FIELD, instant)); + indexWriter.addDocument(document); + document.clear(); + } + } + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + AutoDateHistogramAggregationBuilder aggregationBuilder = new AutoDateHistogramAggregationBuilder("_name"); + if (configure != null) { + configure.accept(aggregationBuilder); + } + + DateFieldMapper.Builder builder = new DateFieldMapper.Builder("_name"); + DateFieldMapper.DateFieldType fieldType = builder.fieldType(); + fieldType.setHasDocValues(true); + fieldType.setName(aggregationBuilder.field()); + + InternalAutoDateHistogram histogram; + if (reduced) { + histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); + } else { + histogram = search(indexSearcher, query, aggregationBuilder, fieldType); + } + verify.accept(histogram); + } + } + } + + private static long asLong(String dateTime) { + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis(); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java new file mode 100644 index 00000000000..389371efd79 --- /dev/null +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.rounding.DateTimeUnit; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram.BucketInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; +import org.joda.time.DateTime; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import static org.elasticsearch.common.unit.TimeValue.timeValueHours; +import 
static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; +import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; + +public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase { + + private DocValueFormat format; + private RoundingInfo[] roundingInfos; + + @Override + public void setUp() throws Exception { + super.setUp(); + format = randomNumericDocValueFormat(); + + roundingInfos = new RoundingInfo[6]; + roundingInfos[0] = new RoundingInfo(Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(), 1, 5, 10, 30); + roundingInfos[1] = new RoundingInfo(Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(), 1, 5, 10, 30); + roundingInfos[2] = new RoundingInfo(Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(), 1, 3, 12); + roundingInfos[3] = new RoundingInfo(Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(), 1, 7); + roundingInfos[4] = new RoundingInfo(Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(), 1, 3); + roundingInfos[5] = new RoundingInfo(Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(), 1, 10, 20, 50, 100); + } + + @Override + protected InternalAutoDateHistogram createTestInstance(String name, + List pipelineAggregators, + Map metaData, + InternalAggregations aggregations) { + int nbBuckets = randomNumberOfBuckets(); + int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); + List buckets = new ArrayList<>(nbBuckets); + long startingDate = System.currentTimeMillis(); + + long interval = randomIntBetween(1, 3); + long intervalMillis = randomFrom(timeValueSeconds(interval), timeValueMinutes(interval), timeValueHours(interval)).getMillis(); + + for (int i = 0; i < nbBuckets; i++) { + long key = startingDate + (intervalMillis * i); + buckets.add(i, new InternalAutoDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations)); + } + InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList()); + BucketInfo bucketInfo = new BucketInfo(roundingInfos, 
randomIntBetween(0, roundingInfos.length - 1), subAggregations); + + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); + } + + @Override + protected void assertReduced(InternalAutoDateHistogram reduced, List inputs) { + int roundingIdx = 0; + for (InternalAutoDateHistogram histogram : inputs) { + if (histogram.getBucketInfo().roundingIdx > roundingIdx) { + roundingIdx = histogram.getBucketInfo().roundingIdx; + } + } + Map expectedCounts = new TreeMap<>(); + for (Histogram histogram : inputs) { + for (Histogram.Bucket bucket : histogram.getBuckets()) { + expectedCounts.compute(roundingInfos[roundingIdx].rounding.round(((DateTime) bucket.getKey()).getMillis()), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + } + } + Map actualCounts = new TreeMap<>(); + for (Histogram.Bucket bucket : reduced.getBuckets()) { + actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), + (key, oldValue) -> (oldValue == null ? 
0 : oldValue) + bucket.getDocCount()); + } + assertEquals(expectedCounts, actualCounts); + } + + @Override + protected Writeable.Reader instanceReader() { + return InternalAutoDateHistogram::new; + } + + @Override + protected Class implementationClass() { + return ParsedAutoDateHistogram.class; + } + + @Override + protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { + String name = instance.getName(); + List buckets = instance.getBuckets(); + int targetBuckets = instance.getTargetBuckets(); + BucketInfo bucketInfo = instance.getBucketInfo(); + List pipelineAggregators = instance.pipelineAggregators(); + Map metaData = instance.getMetaData(); + switch (between(0, 3)) { + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, + InternalAggregations.EMPTY)); + break; + case 2: + int roundingIdx = bucketInfo.roundingIdx == bucketInfo.roundingInfos.length - 1 ? 
0 : bucketInfo.roundingIdx + 1; + bucketInfo = new BucketInfo(bucketInfo.roundingInfos, roundingIdx, bucketInfo.emptySubAggregations); + break; + case 3: + if (metaData == null) { + metaData = new HashMap<>(1); + } else { + metaData = new HashMap<>(instance.getMetaData()); + } + metaData.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 07f25986655..e84f2a99a11 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -87,7 +87,6 @@ import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; /** * Base class for testing {@link Aggregator} implementations. @@ -229,7 +228,7 @@ public abstract class AggregatorTestCase extends ESTestCase { }); when(searchContext.bitsetFilterCache()).thenReturn(new BitsetFilterCache(indexSettings, mock(Listener.class))); doAnswer(invocation -> { - /* Store the releasables so we can release them at the end of the test case. This is important because aggregations don't + /* Store the release-ables so we can release them at the end of the test case. This is important because aggregations don't * close their sub-aggregations. This is fairly similar to what the production code does. 
*/ releasables.add((Releasable) invocation.getArguments()[0]); return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 838b0e315ea..15e44853a97 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -53,8 +53,10 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBu import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.ParsedAutoDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram; import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; @@ -181,6 +183,7 @@ public abstract class InternalAggregationTestCase map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c)); map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c)); map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)); + map.put(AutoDateHistogramAggregationBuilder.NAME, (p, c) -> ParsedAutoDateHistogram.fromXContent(p, (String) c)); map.put(StringTerms.NAME, (p, c) -> 
ParsedStringTerms.fromXContent(p, (String) c)); map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java index 952b6c02794..6f0aebe2396 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java @@ -149,7 +149,8 @@ public abstract class InternalMultiBucketAggregationTestCase parsedClass = implementationClass(); assertNotNull("Parsed aggregation class must not be null", parsedClass); - assertTrue(parsedClass.isInstance(actual)); + assertTrue("Unexpected parsed class, expected instance of: " + actual + ", but was: " + parsedClass, + parsedClass.isInstance(actual)); assertTrue(expected instanceof InternalAggregation); assertEquals(expected.getName(), actual.getName()); From b1bf643e41b2bf2bc1360887b4768836c9159214 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 13 Jul 2018 20:05:49 +0200 Subject: [PATCH 014/260] lazy snapshot repository initialization (#31606) lazy snapshot repository initialization --- .../repositories/url/URLRepository.java | 33 +++-- .../repositories/url/URLRepositoryTests.java | 51 ++++++- .../repositories/azure/AzureRepository.java | 42 ++++-- .../azure/AzureRepositorySettingsTests.java | 29 ++-- .../gcs/GoogleCloudStorageRepository.java | 24 ++-- ...eCloudStorageBlobStoreRepositoryTests.java | 10 +- .../repositories/hdfs/HdfsRepository.java | 26 ++-- .../repositories/s3/S3Repository.java | 71 +++++---- .../repositories/s3/S3RepositoryPlugin.java | 4 +- .../s3/RepositoryCredentialsTests.java | 26 +++- .../s3/S3BlobStoreRepositoryTests.java | 15 +- 
.../repositories/s3/S3RepositoryTests.java | 38 +++-- .../repositories/RepositoriesService.java | 73 ++++++---- .../VerifyNodeRepositoryAction.java | 2 +- .../blobstore/BlobStoreRepository.java | 136 ++++++++++++++---- .../repositories/fs/FsRepository.java | 32 +++-- ...ClusterStateServiceRandomUpdatesTests.java | 2 +- .../BlobStoreRepositoryRestoreTests.java | 11 +- .../blobstore/BlobStoreRepositoryTests.java | 35 ++++- .../fs}/FsBlobStoreRepositoryIT.java | 11 +- .../SharedClusterSnapshotRestoreIT.java | 23 ++- .../snapshots/mockstore/MockRepository.java | 13 +- .../ESBlobStoreRepositoryIntegTestCase.java | 56 ++++++-- 23 files changed, 543 insertions(+), 220 deletions(-) rename server/src/test/java/org/elasticsearch/{snapshots => repositories/fs}/FsBlobStoreRepositoryIT.java (79%) diff --git a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java index c1128fd683a..98b8c0a1945 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java @@ -20,6 +20,7 @@ package org.elasticsearch.repositories.url; import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.url.URLBlobStore; @@ -31,7 +32,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; @@ -71,33 +71,44 @@ public class URLRepository extends BlobStoreRepository { private final Environment environment; 
- private final URLBlobStore blobStore; - private final BlobPath basePath; + private final URL url; + /** * Constructs a read-only URL-based repository */ public URLRepository(RepositoryMetaData metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry) throws IOException { + NamedXContentRegistry namedXContentRegistry) { super(metadata, environment.settings(), namedXContentRegistry); if (URL_SETTING.exists(metadata.settings()) == false && REPOSITORIES_URL_SETTING.exists(settings) == false) { throw new RepositoryException(metadata.name(), "missing url"); } + this.environment = environment; supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(settings); urlWhiteList = ALLOWED_URLS_SETTING.get(settings).toArray(new URIPattern[]{}); - this.environment = environment; - - URL url = URL_SETTING.exists(metadata.settings()) ? URL_SETTING.get(metadata.settings()) : REPOSITORIES_URL_SETTING.get(settings); - URL normalizedURL = checkURL(url); - blobStore = new URLBlobStore(settings, normalizedURL); basePath = BlobPath.cleanPath(); + url = URL_SETTING.exists(metadata.settings()) + ? 
URL_SETTING.get(metadata.settings()) : REPOSITORIES_URL_SETTING.get(settings); } @Override - protected BlobStore blobStore() { - return blobStore; + protected BlobStore createBlobStore() { + URL normalizedURL = checkURL(url); + return new URLBlobStore(settings, normalizedURL); + } + + // only use for testing + @Override + protected BlobContainer blobContainer() { + return super.blobContainer(); + } + + // only use for testing + @Override + protected BlobStore getBlobStore() { + return super.getBlobStore(); } @Override diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java index 1af4c1eaba9..2de4c132673 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java @@ -31,8 +31,22 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Collections; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.nullValue; + public class URLRepositoryTests extends ESTestCase { + private URLRepository createRepository(Settings baseSettings, RepositoryMetaData repositoryMetaData) { + return new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), + new NamedXContentRegistry(Collections.emptyList())) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo manually on test/main threads + } + }; + } + public void testWhiteListingRepoURL() throws IOException { String repoPath = createTempDir().resolve("repository").toUri().toURL().toString(); Settings baseSettings = Settings.builder() @@ -41,8 +55,12 @@ public class URLRepositoryTests extends ESTestCase { .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), 
repoPath) .build(); RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); - new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList())); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); + + assertThat("blob store has to be lazy initialized", repository.getBlobStore(), is(nullValue())); + repository.blobContainer(); + assertThat("blobContainer has to initialize blob store", repository.getBlobStore(), not(nullValue())); } public void testIfNotWhiteListedMustSetRepoURL() throws IOException { @@ -52,9 +70,10 @@ public class URLRepositoryTests extends ESTestCase { .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath) .build(); RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); try { - new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList())); + repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { String msg = "[url] file url [" + repoPath @@ -73,13 +92,33 @@ public class URLRepositoryTests extends ESTestCase { .put(URLRepository.SUPPORTED_PROTOCOLS_SETTING.getKey(), "http,https") .build(); RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); try { - new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList())); + repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { 
assertEquals("[url] unsupported url protocol [file] from URL [" + repoPath +"]", e.getMessage()); } } + public void testNonNormalizedUrl() throws IOException { + Settings baseSettings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(URLRepository.ALLOWED_URLS_SETTING.getKey(), "file:/tmp/") + .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), "file:/var/" ) + .build(); + RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); + try { + repository.blobContainer(); + fail("RepositoryException should have been thrown."); + } catch (RepositoryException e) { + assertEquals("[url] file url [file:/var/] doesn't match any of the locations " + + "specified by path.repo or repositories.url.allowed_urls", + e.getMessage()); + } + } + } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 47b398a4c2f..0797c78af33 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -38,7 +38,6 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.snapshots.SnapshotCreationException; import org.elasticsearch.snapshots.SnapshotId; -import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.Locale; @@ -78,25 +77,21 @@ public class AzureRepository extends BlobStoreRepository { public static final Setting READONLY_SETTING = Setting.boolSetting("readonly", false, Property.NodeScope); } - private final AzureBlobStore blobStore; private final BlobPath basePath; private final 
ByteSizeValue chunkSize; private final boolean compress; + private final Environment environment; + private final AzureStorageService storageService; private final boolean readonly; public AzureRepository(RepositoryMetaData metadata, Environment environment, NamedXContentRegistry namedXContentRegistry, - AzureStorageService storageService) throws IOException, URISyntaxException, StorageException { + AzureStorageService storageService) { super(metadata, environment.settings(), namedXContentRegistry); - this.blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = Repository.COMPRESS_SETTING.get(metadata.settings()); - // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. - // For secondary_only setting, the repository should be read only - if (Repository.READONLY_SETTING.exists(metadata.settings())) { - this.readonly = Repository.READONLY_SETTING.get(metadata.settings()); - } else { - this.readonly = this.blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY; - } + this.environment = environment; + this.storageService = storageService; + final String basePath = Strings.trimLeadingCharacter(Repository.BASE_PATH_SETTING.get(metadata.settings()), '/'); if (Strings.hasLength(basePath)) { // Remove starting / if any @@ -108,15 +103,33 @@ public class AzureRepository extends BlobStoreRepository { } else { this.basePath = BlobPath.cleanPath(); } - logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( - "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", blobStore, chunkSize, compress, basePath)); + + // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. 
+ // For secondary_only setting, the repository should be read only + final LocationMode locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); + if (Repository.READONLY_SETTING.exists(metadata.settings())) { + this.readonly = Repository.READONLY_SETTING.get(metadata.settings()); + } else { + this.readonly = locationMode == LocationMode.SECONDARY_ONLY; + } + } + + // only use for testing + @Override + protected BlobStore getBlobStore() { + return super.getBlobStore(); } /** * {@inheritDoc} */ @Override - protected BlobStore blobStore() { + protected AzureBlobStore createBlobStore() throws URISyntaxException, StorageException { + final AzureBlobStore blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); + + logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", + blobStore, chunkSize, compress, basePath)); return blobStore; } @@ -144,6 +157,7 @@ public class AzureRepository extends BlobStoreRepository { @Override public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { try { + final AzureBlobStore blobStore = (AzureBlobStore) blobStore(); if (blobStore.containerExist() == false) { throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. 
Please create it before " + " creating an azure snapshot repository backed by it."); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index 639905042cf..b4b71577cbc 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.azure; import com.microsoft.azure.storage.LocationMode; -import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -30,76 +29,76 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; -import java.net.URISyntaxException; - import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; public class AzureRepositorySettingsTests extends ESTestCase { - private AzureRepository azureRepository(Settings settings) throws StorageException, IOException, URISyntaxException { + private AzureRepository azureRepository(Settings settings) { Settings internalSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) .put(settings) .build(); - return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), + final AzureRepository azureRepository = new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), TestEnvironment.newEnvironment(internalSettings), 
NamedXContentRegistry.EMPTY, mock(AzureStorageService.class)); + assertThat(azureRepository.getBlobStore(), is(nullValue())); + return azureRepository; } - public void testReadonlyDefault() throws StorageException, IOException, URISyntaxException { + public void testReadonlyDefault() { assertThat(azureRepository(Settings.EMPTY).isReadOnly(), is(false)); } - public void testReadonlyDefaultAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyDefaultAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithPrimaryOnly() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryOnly() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_ONLY.name()) .build()).isReadOnly(), is(false)); } - public void testReadonlyWithPrimaryOnlyAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryOnlyAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_ONLY.name()) .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithSecondaryOnlyAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithSecondaryOnlyAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.SECONDARY_ONLY.name()) .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithSecondaryOnlyAndReadonlyOff() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithSecondaryOnlyAndReadonlyOff() { assertThat(azureRepository(Settings.builder() 
.put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.SECONDARY_ONLY.name()) .put("readonly", false) .build()).isReadOnly(), is(false)); } - public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_THEN_SECONDARY.name()) .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOff() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOff() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_THEN_SECONDARY.name()) .put("readonly", false) .build()).isReadOnly(), is(false)); } - public void testChunkSize() throws StorageException, IOException, URISyntaxException { + public void testChunkSize() { // default chunk size AzureRepository azureRepository = azureRepository(Settings.EMPTY); assertEquals(AzureStorageService.MAX_CHUNK_SIZE, azureRepository.chunkSize()); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 83d48eeda20..fe6c8889bd2 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -22,7 +22,6 @@ package org.elasticsearch.repositories.gcs; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; -import 
org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -56,18 +55,19 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE, Property.NodeScope, Property.Dynamic); static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity()); - private final ByteSizeValue chunkSize; - private final boolean compress; + private final GoogleCloudStorageService storageService; private final BlobPath basePath; - private final GoogleCloudStorageBlobStore blobStore; + private final boolean compress; + private final ByteSizeValue chunkSize; + private final String bucket; + private final String clientName; GoogleCloudStorageRepository(RepositoryMetaData metadata, Environment environment, NamedXContentRegistry namedXContentRegistry, - GoogleCloudStorageService storageService) throws Exception { + GoogleCloudStorageService storageService) { super(metadata, environment.settings(), namedXContentRegistry); + this.storageService = storageService; - String bucket = getSetting(BUCKET, metadata); - String clientName = CLIENT_NAME.get(metadata.settings()); String basePath = BASE_PATH.get(metadata.settings()); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); @@ -81,16 +81,14 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { this.compress = getSetting(COMPRESS, metadata); this.chunkSize = getSetting(CHUNK_SIZE, metadata); - + this.bucket = getSetting(BUCKET, metadata); + this.clientName = CLIENT_NAME.get(metadata.settings()); logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); - - this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); } - @Override - protected BlobStore blobStore() { - return 
blobStore; + protected GoogleCloudStorageBlobStore createBlobStore() { + return new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); } @Override diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 3692b26f2bb..6d5c1bbf853 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.junit.AfterClass; @@ -34,6 +35,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.instanceOf; public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase { @@ -49,9 +51,10 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos } @Override - protected void createTestRepository(String name) { + protected void createTestRepository(String name, boolean verify) { assertAcked(client().admin().cluster().preparePutRepository(name) .setType(GoogleCloudStorageRepository.TYPE) + .setVerify(verify) .setSettings(Settings.builder() .put("bucket", BUCKET) .put("base_path", GoogleCloudStorageBlobStoreRepositoryTests.class.getSimpleName()) @@ -59,6 +62,11 @@ public class 
GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); } + @Override + protected void afterCreationCheck(Repository repository) { + assertThat(repository, instanceOf(GoogleCloudStorageRepository.class)); + } + @AfterClass public static void wipeRepository() { blobs.clear(); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 5ef1c7d18d6..97285f9cecb 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -42,7 +42,6 @@ import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -61,29 +60,26 @@ public final class HdfsRepository extends BlobStoreRepository { private final ByteSizeValue chunkSize; private final boolean compress; private final BlobPath basePath = BlobPath.cleanPath(); - - private HdfsBlobStore blobStore; + private final URI uri; + private final String pathSetting; // buffer size passed to HDFS read/write methods // TODO: why 100KB? 
private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB); public HdfsRepository(RepositoryMetaData metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry) throws IOException { + NamedXContentRegistry namedXContentRegistry) { super(metadata, environment.settings(), namedXContentRegistry); this.environment = environment; this.chunkSize = metadata.settings().getAsBytesSize("chunk_size", null); this.compress = metadata.settings().getAsBoolean("compress", false); - } - @Override - protected void doStart() { String uriSetting = getMetadata().settings().get("uri"); if (Strings.hasText(uriSetting) == false) { throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); } - URI uri = URI.create(uriSetting); + uri = URI.create(uriSetting); if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting)); @@ -93,16 +89,11 @@ public final class HdfsRepository extends BlobStoreRepository { "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting)); } - String pathSetting = getMetadata().settings().get("path"); + pathSetting = getMetadata().settings().get("path"); // get configuration if (pathSetting == null) { throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); } - - // initialize our blobstore using elevated privileges. 
- SpecialPermission.check(); - blobStore = AccessController.doPrivileged((PrivilegedAction) () -> createBlobstore(uri, pathSetting, getMetadata().settings())); - super.doStart(); } private HdfsBlobStore createBlobstore(URI uri, String path, Settings repositorySettings) { @@ -229,7 +220,12 @@ public final class HdfsRepository extends BlobStoreRepository { } @Override - protected BlobStore blobStore() { + protected HdfsBlobStore createBlobStore() { + // initialize our blobstore using elevated privileges. + SpecialPermission.check(); + final HdfsBlobStore blobStore = + AccessController.doPrivileged((PrivilegedAction) + () -> createBlobstore(uri, pathSetting, getMetadata().settings())); return blobStore; } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index f6f949aa4d0..ec60536f135 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -35,7 +35,6 @@ import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import java.io.IOException; import java.util.Map; import java.util.function.Function; @@ -144,30 +143,43 @@ class S3Repository extends BlobStoreRepository { */ static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path"); - private final S3BlobStore blobStore; + private final S3Service service; - private final BlobPath basePath; + private final String bucket; + + private final ByteSizeValue bufferSize; private final ByteSizeValue chunkSize; private final boolean compress; + private final BlobPath basePath; + + private final boolean serverSideEncryption; + + private final String storageClass; + + private final String cannedACL; + + private final String 
clientName; + /** * Constructs an s3 backed repository */ S3Repository(final RepositoryMetaData metadata, final Settings settings, final NamedXContentRegistry namedXContentRegistry, - final S3Service service) throws IOException { + final S3Service service) { super(metadata, settings, namedXContentRegistry); + this.service = service; - final String bucket = BUCKET_SETTING.get(metadata.settings()); + // Get and validate the user's bucket setting + this.bucket = BUCKET_SETTING.get(metadata.settings()); if (bucket == null) { throw new RepositoryException(metadata.name(), "No bucket defined for s3 repository"); } - final boolean serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); - final ByteSizeValue bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); + this.bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = COMPRESS_SETTING.get(metadata.settings()); @@ -177,33 +189,44 @@ class S3Repository extends BlobStoreRepository { ") can't be lower than " + BUFFER_SIZE_SETTING.getKey() + " (" + bufferSize + ")."); } - // Parse and validate the user's S3 Storage Class setting - final String storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); - final String cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); - final String clientName = CLIENT_NAME.get(metadata.settings()); - - logger.debug("using bucket [{}], chunk_size [{}], server_side_encryption [{}], " + - "buffer_size [{}], cannedACL [{}], storageClass [{}]", - bucket, chunkSize, serverSideEncryption, bufferSize, cannedACL, storageClass); - - // deprecated behavior: override client credentials from the cluster state - // (repository settings) - if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) { - overrideCredentialsFromClusterState(service); - } - blobStore = new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, 
storageClass); - final String basePath = BASE_PATH_SETTING.get(metadata.settings()); if (Strings.hasLength(basePath)) { this.basePath = new BlobPath().add(basePath); } else { this.basePath = BlobPath.cleanPath(); } + + this.serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); + + this.storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); + this.cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); + this.clientName = CLIENT_NAME.get(metadata.settings()); + + logger.debug("using bucket [{}], chunk_size [{}], server_side_encryption [{}], " + + "buffer_size [{}], cannedACL [{}], storageClass [{}]", + bucket, chunkSize, serverSideEncryption, bufferSize, cannedACL, storageClass); + + // deprecated behavior: override client credentials from the cluster state (repository settings) + if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) { + overrideCredentialsFromClusterState(service); + } } + @Override + protected S3BlobStore createBlobStore() { + return new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + } + + // only use for testing @Override protected BlobStore blobStore() { - return blobStore; + return super.blobStore(); + } + + // only use for testing + @Override + protected BlobStore getBlobStore() { + return super.getBlobStore(); } @Override diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index 79a5187059f..da3219f2aef 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -61,7 +61,7 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo }); } - private final S3Service service; + protected final S3Service service; public S3RepositoryPlugin(final Settings settings) { this(settings, new 
S3Service(settings)); @@ -77,7 +77,7 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo // proxy method for testing protected S3Repository createRepository(final RepositoryMetaData metadata, final Settings settings, - final NamedXContentRegistry registry) throws IOException { + final NamedXContentRegistry registry) { return new S3Repository(metadata, settings, registry, service); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 744a27dc48e..7eb603b4b78 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -80,6 +80,16 @@ public class RepositoryCredentialsTests extends ESTestCase { ProxyS3RepositoryPlugin(Settings settings) { super(settings, new ProxyS3Service(settings)); } + + @Override + protected S3Repository createRepository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry registry) { + return new S3Repository(metadata, settings, registry, service){ + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo manually on test/main threads + } + }; + } } public void testRepositoryCredentialsOverrideSecureCredentials() throws IOException { @@ -102,8 +112,8 @@ public class RepositoryCredentialsTests extends ESTestCase { .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret").build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); - S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); - AmazonS3Reference s3Ref = ((S3BlobStore) 
s3repo.blobStore()).clientReference()) { + S3Repository s3repo = createAndStartRepository(metadata, s3Plugin); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); @@ -125,8 +135,8 @@ public class RepositoryCredentialsTests extends ESTestCase { .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret") .build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(Settings.EMPTY); - S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); - AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + S3Repository s3repo = createAndStartRepository(metadata, s3Plugin); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); @@ -140,6 +150,12 @@ public class RepositoryCredentialsTests extends ESTestCase { + " See the breaking changes documentation for the next major version."); } + private S3Repository createAndStartRepository(RepositoryMetaData metadata, S3RepositoryPlugin s3Plugin) { + final S3Repository repository = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + repository.start(); + return repository; + } + public void testReinitSecureCredentials() throws IOException { final String clientName = randomFrom("default", "some_client"); // initial client node settings @@ -156,7 +172,7 @@ public class RepositoryCredentialsTests extends ESTestCase { } final 
RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", builder.build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); - S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) { + S3Repository s3repo = createAndStartRepository(metadata, s3Plugin)) { try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials .getCredentials(); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index b061e8e45ed..51fc48dfb59 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -51,6 +51,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; @@ -84,8 +85,11 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa } @Override - protected void createTestRepository(final String name) { - assertAcked(client().admin().cluster().preparePutRepository(name).setType(S3Repository.TYPE).setSettings(Settings.builder() + protected void createTestRepository(final String name, boolean verify) { + assertAcked(client().admin().cluster().preparePutRepository(name) + .setType(S3Repository.TYPE) + .setVerify(verify) + .setSettings(Settings.builder() .put(S3Repository.BUCKET_SETTING.getKey(), bucket) .put(S3Repository.CLIENT_NAME.getKey(), client) 
.put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize) @@ -96,6 +100,11 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa .put(S3Repository.SECRET_KEY_SETTING.getKey(), "not_used_but_this_is_a_secret"))); } + @Override + protected void afterCreationCheck(Repository repository) { + assertThat(repository, instanceOf(S3Repository.class)); + } + @Override protected Collection> nodePlugins() { return Collections.singletonList(TestS3RepositoryPlugin.class); @@ -125,7 +134,7 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa public void testInsecureRepositoryCredentials() throws Exception { final String repositoryName = "testInsecureRepositoryCredentials"; - createTestRepository(repositoryName); + createAndCheckTestRepository(repositoryName); final NodeClient nodeClient = internalCluster().getInstance(NodeClient.class); final RestGetRepositoriesAction getRepoAction = new RestGetRepositoriesAction(Settings.EMPTY, mock(RestController.class), internalCluster().getInstance(SettingsFilter.class)); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 14f53ae5d33..dcc46661bef 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -29,11 +29,13 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import java.io.IOException; import java.util.Collections; import java.util.Map; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; public class S3RepositoryTests extends ESTestCase { @@ -70,27 +72,27 @@ 
public class S3RepositoryTests extends ESTestCase { } } - public void testInvalidChunkBufferSizeSettings() throws IOException { + public void testInvalidChunkBufferSizeSettings() { // chunk < buffer should fail final Settings s1 = bufferAndChunkSettings(10, 5); final Exception e1 = expectThrows(RepositoryException.class, - () -> new S3Repository(getRepositoryMetaData(s1), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())); + () -> createS3Repo(getRepositoryMetaData(s1))); assertThat(e1.getMessage(), containsString("chunk_size (5mb) can't be lower than buffer_size (10mb)")); // chunk > buffer should pass final Settings s2 = bufferAndChunkSettings(5, 10); - new S3Repository(getRepositoryMetaData(s2), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close(); + createS3Repo(getRepositoryMetaData(s2)).close(); // chunk = buffer should pass final Settings s3 = bufferAndChunkSettings(5, 5); - new S3Repository(getRepositoryMetaData(s3), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close(); + createS3Repo(getRepositoryMetaData(s3)).close(); // buffer < 5mb should fail final Settings s4 = bufferAndChunkSettings(4, 10); final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, - () -> new S3Repository(getRepositoryMetaData(s4), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) + () -> createS3Repo(getRepositoryMetaData(s4)) .close()); assertThat(e2.getMessage(), containsString("failed to parse value [4mb] for setting [buffer_size], must be >= [5mb]")); final Settings s5 = bufferAndChunkSettings(5, 6000000); final IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class, - () -> new S3Repository(getRepositoryMetaData(s5), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) + () -> createS3Repo(getRepositoryMetaData(s5)) .close()); assertThat(e3.getMessage(), containsString("failed to parse value [6000000mb] for setting [chunk_size], must be <= 
[5tb]")); } @@ -106,20 +108,32 @@ public class S3RepositoryTests extends ESTestCase { return new RepositoryMetaData("dummy-repo", "mock", Settings.builder().put(settings).build()); } - public void testBasePathSetting() throws IOException { + public void testBasePathSetting() { final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() .put(S3Repository.BASE_PATH_SETTING.getKey(), "foo/bar").build()); - try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) { + try (S3Repository s3repo = createS3Repo(metadata)) { assertEquals("foo/bar/", s3repo.basePath().buildAsString()); } } - public void testDefaultBufferSize() throws IOException { + public void testDefaultBufferSize() { final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.EMPTY); - try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) { - final long defaultBufferSize = ((S3BlobStore) s3repo.blobStore()).bufferSizeInBytes(); + try (S3Repository s3repo = createS3Repo(metadata)) { + assertThat(s3repo.getBlobStore(), is(nullValue())); + s3repo.start(); + final long defaultBufferSize = ((S3BlobStore)s3repo.blobStore()).bufferSizeInBytes(); + assertThat(s3repo.getBlobStore(), not(nullValue())); assertThat(defaultBufferSize, Matchers.lessThanOrEqualTo(100L * 1024 * 1024)); assertThat(defaultBufferSize, Matchers.greaterThanOrEqualTo(5L * 1024 * 1024)); } } + + private S3Repository createS3Repo(RepositoryMetaData metadata) { + return new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo manually on test/main threads + } + }; + } } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java 
b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index d5b2a6413e9..c6cbaa50cdf 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -58,16 +59,20 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta private final ClusterService clusterService; + private final ThreadPool threadPool; + private final VerifyNodeRepositoryAction verifyAction; private volatile Map repositories = Collections.emptyMap(); @Inject public RepositoriesService(Settings settings, ClusterService clusterService, TransportService transportService, - Map typesRegistry) { + Map typesRegistry, + ThreadPool threadPool) { super(settings); this.typesRegistry = typesRegistry; this.clusterService = clusterService; + this.threadPool = threadPool; // Doesn't make sense to maintain repositories on non-master and non-data nodes // Nothing happens there anyway if (DiscoveryNode.isDataNode(settings) || DiscoveryNode.isMasterNode(settings)) { @@ -208,39 +213,51 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta public void verifyRepository(final String repositoryName, final ActionListener listener) { final Repository repository = repository(repositoryName); try { - final String verificationToken = repository.startVerification(); - if (verificationToken != null) { + threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> { try { - verifyAction.verify(repositoryName, verificationToken, new ActionListener() { - @Override - public void 
onResponse(VerifyResponse verifyResponse) { - try { - repository.endVerification(verificationToken); - } catch (Exception e) { - logger.warn(() -> new ParameterizedMessage("[{}] failed to finish repository verification", repositoryName), e); - listener.onFailure(e); - return; - } - listener.onResponse(verifyResponse); - } + final String verificationToken = repository.startVerification(); + if (verificationToken != null) { + try { + verifyAction.verify(repositoryName, verificationToken, new ActionListener() { + @Override + public void onResponse(VerifyResponse verifyResponse) { + threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> { + try { + repository.endVerification(verificationToken); + } catch (Exception e) { + logger.warn(() -> new ParameterizedMessage( + "[{}] failed to finish repository verification", repositoryName), e); + listener.onFailure(e); + return; + } + listener.onResponse(verifyResponse); + }); + } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } catch (Exception e) { + threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> { + try { + repository.endVerification(verificationToken); + } catch (Exception inner) { + inner.addSuppressed(e); + logger.warn(() -> new ParameterizedMessage( + "[{}] failed to finish repository verification", repositoryName), inner); + } + listener.onFailure(e); + }); } - }); - } catch (Exception e) { - try { - repository.endVerification(verificationToken); - } catch (Exception inner) { - inner.addSuppressed(e); - logger.warn(() -> new ParameterizedMessage("[{}] failed to finish repository verification", repositoryName), inner); + } else { + listener.onResponse(new VerifyResponse(new DiscoveryNode[0], new VerificationFailure[0])); } + } catch (Exception e) { listener.onFailure(e); } - } else { - listener.onResponse(new VerifyResponse(new DiscoveryNode[0], new VerificationFailure[0])); - } + }); } 
catch (Exception e) { listener.onFailure(e); } diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index 380ae974080..fbaf369912e 100644 --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -61,7 +61,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent { this.transportService = transportService; this.clusterService = clusterService; this.repositoriesService = repositoriesService; - transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SAME, new VerifyNodeRepositoryRequestHandler()); + transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SNAPSHOT, new VerifyNodeRepositoryRequestHandler()); } public void verify(String repository, String verificationToken, final ActionListener listener) { diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 86131fe468d..22743e38839 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -34,6 +34,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RateLimiter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -102,6 +103,7 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import 
org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotShardFailure; +import org.elasticsearch.threadpool.ThreadPool; import java.io.FilterInputStream; import java.io.IOException; @@ -126,8 +128,8 @@ import static org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSna /** * BlobStore - based implementation of Snapshot Repository *

- * This repository works with any {@link BlobStore} implementation. The blobStore should be initialized in the derived - * class before {@link #doStart()} is called. + * This repository works with any {@link BlobStore} implementation. The blobStore could be (and preferred) lazy initialized in + * {@link #createBlobStore()}. *

* BlobStoreRepository maintains the following structure in the blob store *

@@ -169,8 +171,6 @@ import static org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSna
  */
 public abstract class BlobStoreRepository extends AbstractLifecycleComponent implements Repository {
 
-    private BlobContainer snapshotsBlobContainer;
-
     protected final RepositoryMetaData metadata;
 
     protected final NamedXContentRegistry namedXContentRegistry;
@@ -225,6 +225,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     private final ChecksumBlobStoreFormat indexShardSnapshotsFormat;
 
+    private final Object lock = new Object();
+
+    private final SetOnce blobContainer = new SetOnce<>();
+
+    private final SetOnce blobStore = new SetOnce<>();
+
     /**
      * Constructs new BlobStoreRepository
      *
@@ -251,7 +257,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     @Override
     protected void doStart() {
-        this.snapshotsBlobContainer = blobStore().blobContainer(basePath());
         globalMetaDataFormat = new ChecksumBlobStoreFormat<>(METADATA_CODEC, METADATA_NAME_FORMAT,
             MetaData::fromXContent, namedXContentRegistry, isCompress());
         indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT,
@@ -265,17 +270,82 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     @Override
     protected void doClose() {
-        try {
-            blobStore().close();
-        } catch (Exception t) {
-            logger.warn("cannot close blob store", t);
+        BlobStore store;
+        // make sure we close the blobStore even if its initialization was started concurrently with this close
+        synchronized (lock) {
+            store = blobStore.get();
+        }
+        if (store != null) {
+            try {
+                store.close();
+            } catch (Exception t) {
+                logger.warn("cannot close blob store", t);
+            }
         }
     }
 
+    // package private, only use for testing
+    BlobContainer getBlobContainer() {
+        return blobContainer.get();
+    }
+
+    // for test purposes only
+    protected BlobStore getBlobStore() {
+        return blobStore.get();
+    }
+
     /**
-     * Returns the BlobStore to read and write data.
+     * Maintains a single lazily-created instance of {@link BlobContainer}
      */
-    protected abstract BlobStore blobStore();
+    protected BlobContainer blobContainer() {
+        assertSnapshotOrGenericThread();
+
+        BlobContainer blobContainer = this.blobContainer.get();
+        if (blobContainer == null) {
+           synchronized (lock) {
+               blobContainer = this.blobContainer.get();
+               if (blobContainer == null) {
+                   blobContainer = blobStore().blobContainer(basePath());
+                   this.blobContainer.set(blobContainer);
+               }
+           }
+        }
+
+        return blobContainer;
+    }
+
+    /**
+     * Maintains a single lazily-created instance of {@link BlobStore}
+     */
+    protected BlobStore blobStore() {
+        assertSnapshotOrGenericThread();
+
+        BlobStore store = blobStore.get();
+        if (store == null) {
+            synchronized (lock) {
+                store = blobStore.get();
+                if (store == null) {
+                    if (lifecycle.started() == false) {
+                        throw new RepositoryException(metadata.name(), "repository is not in started state");
+                    }
+                    try {
+                        store = createBlobStore();
+                    } catch (RepositoryException e) {
+                        throw e;
+                    } catch (Exception e) {
+                        throw new RepositoryException(metadata.name(), "cannot create blob store" , e);
+                    }
+                    blobStore.set(store);
+                }
+            }
+        }
+        return store;
+    }
+
+    /**
+     * Creates new BlobStore to read and write data.
+     */
+    protected abstract BlobStore createBlobStore() throws Exception;
 
     /**
      * Returns base path of the repository
@@ -319,12 +389,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             if (repositoryData.getAllSnapshotIds().stream().anyMatch(s -> s.getName().equals(snapshotName))) {
                 throw new InvalidSnapshotNameException(metadata.name(), snapshotId.getName(), "snapshot with the same name already exists");
             }
-            if (snapshotFormat.exists(snapshotsBlobContainer, snapshotId.getUUID())) {
+            if (snapshotFormat.exists(blobContainer(), snapshotId.getUUID())) {
                 throw new InvalidSnapshotNameException(metadata.name(), snapshotId.getName(), "snapshot with the same name already exists");
             }
 
             // Write Global MetaData
-            globalMetaDataFormat.write(clusterMetaData, snapshotsBlobContainer, snapshotId.getUUID());
+            globalMetaDataFormat.write(clusterMetaData, blobContainer(), snapshotId.getUUID());
 
             // write the index metadata for each index in the snapshot
             for (IndexId index : indices) {
@@ -421,7 +491,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     private void deleteSnapshotBlobIgnoringErrors(final SnapshotInfo snapshotInfo, final String blobId) {
         try {
-            snapshotFormat.delete(snapshotsBlobContainer, blobId);
+            snapshotFormat.delete(blobContainer(), blobId);
         } catch (IOException e) {
             if (snapshotInfo != null) {
                 logger.warn(() -> new ParameterizedMessage("[{}] Unable to delete snapshot file [{}]",
@@ -434,7 +504,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     private void deleteGlobalMetaDataBlobIgnoringErrors(final SnapshotInfo snapshotInfo, final String blobId) {
         try {
-            globalMetaDataFormat.delete(snapshotsBlobContainer, blobId);
+            globalMetaDataFormat.delete(blobContainer(), blobId);
         } catch (IOException e) {
             if (snapshotInfo != null) {
                 logger.warn(() -> new ParameterizedMessage("[{}] Unable to delete global metadata file [{}]",
@@ -472,7 +542,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             startTime, failure, System.currentTimeMillis(), totalShards, shardFailures,
             includeGlobalState);
         try {
-            snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, snapshotId.getUUID());
+            snapshotFormat.write(blobStoreSnapshot, blobContainer(), snapshotId.getUUID());
             final RepositoryData repositoryData = getRepositoryData();
             writeIndexGen(repositoryData.addSnapshot(snapshotId, blobStoreSnapshot.state(), indices), repositoryStateId);
         } catch (FileAlreadyExistsException ex) {
@@ -490,7 +560,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     @Override
     public SnapshotInfo getSnapshotInfo(final SnapshotId snapshotId) {
         try {
-            return snapshotFormat.read(snapshotsBlobContainer, snapshotId.getUUID());
+            return snapshotFormat.read(blobContainer(), snapshotId.getUUID());
         } catch (NoSuchFileException ex) {
             throw new SnapshotMissingException(metadata.name(), snapshotId, ex);
         } catch (IOException | NotXContentException ex) {
@@ -501,7 +571,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     @Override
     public MetaData getSnapshotGlobalMetaData(final SnapshotId snapshotId) {
         try {
-            return globalMetaDataFormat.read(snapshotsBlobContainer, snapshotId.getUUID());
+            return globalMetaDataFormat.read(blobContainer(), snapshotId.getUUID());
         } catch (NoSuchFileException ex) {
             throw new SnapshotMissingException(metadata.name(), snapshotId, ex);
         } catch (IOException ex) {
@@ -543,11 +613,21 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         return restoreRateLimitingTimeInNanos.count();
     }
 
+    protected void assertSnapshotOrGenericThread() {
+        assert Thread.currentThread().getName().contains(ThreadPool.Names.SNAPSHOT)
+            || Thread.currentThread().getName().contains(ThreadPool.Names.GENERIC) :
+            "Expected current thread [" + Thread.currentThread() + "] to be the snapshot or generic thread.";
+    }
+
     @Override
     public String startVerification() {
         try {
             if (isReadOnly()) {
-                // It's readonly - so there is not much we can do here to verify it
+                // TODO: add repository verification for read-only repositories
+
+                // It's readonly - so there is not much we can do here to verify it, apart from trying to create the blobStore()
+                // and checking that it is accessible on the master
+                blobStore();
                 return null;
             } else {
                 String seed = UUIDs.randomBase64UUID();
@@ -584,7 +664,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             final String snapshotsIndexBlobName = INDEX_FILE_PREFIX + Long.toString(indexGen);
 
             RepositoryData repositoryData;
-            try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) {
+            try (InputStream blob = blobContainer().readBlob(snapshotsIndexBlobName)) {
                 BytesStreamOutput out = new BytesStreamOutput();
                 Streams.copy(blob, out);
                 // EMPTY is safe here because RepositoryData#fromXContent calls namedObject
@@ -598,7 +678,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             }
 
             // now load the incompatible snapshot ids, if they exist
-            try (InputStream blob = snapshotsBlobContainer.readBlob(INCOMPATIBLE_SNAPSHOTS_BLOB)) {
+            try (InputStream blob = blobContainer().readBlob(INCOMPATIBLE_SNAPSHOTS_BLOB)) {
                 BytesStreamOutput out = new BytesStreamOutput();
                 Streams.copy(blob, out);
                 try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
@@ -636,11 +716,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         return readOnly;
     }
 
-    // package private, only use for testing
-    BlobContainer blobContainer() {
-        return snapshotsBlobContainer;
-    }
-
     protected void writeIndexGen(final RepositoryData repositoryData, final long repositoryStateId) throws IOException {
         assert isReadOnly() == false; // can not write to a read only repository
         final long currentGen = latestIndexBlobId();
@@ -668,7 +743,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         // delete the N-2 index file if it exists, keep the previous one around as a backup
         if (isReadOnly() == false && newGen - 2 >= 0) {
             final String oldSnapshotIndexFile = INDEX_FILE_PREFIX + Long.toString(newGen - 2);
-            snapshotsBlobContainer.deleteBlobIgnoringIfNotExists(oldSnapshotIndexFile);
+            blobContainer().deleteBlobIgnoringIfNotExists(oldSnapshotIndexFile);
         }
 
         // write the current generation to the index-latest file
@@ -736,7 +811,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     // package private for testing
     long readSnapshotIndexLatestBlob() throws IOException {
-        try (InputStream blob = snapshotsBlobContainer.readBlob(INDEX_LATEST_BLOB)) {
+        try (InputStream blob = blobContainer().readBlob(INDEX_LATEST_BLOB)) {
             BytesStreamOutput out = new BytesStreamOutput();
             Streams.copy(blob, out);
             return Numbers.bytesToLong(out.bytes().toBytesRef());
@@ -744,7 +819,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     }
 
     private long listBlobsToGetLatestIndexId() throws IOException {
-        Map blobs = snapshotsBlobContainer.listBlobsByPrefix(INDEX_FILE_PREFIX);
+        Map blobs = blobContainer().listBlobsByPrefix(INDEX_FILE_PREFIX);
         long latest = RepositoryData.EMPTY_REPO_GEN;
         if (blobs.isEmpty()) {
             // no snapshot index blobs have been written yet
@@ -766,7 +841,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     private void writeAtomic(final String blobName, final BytesReference bytesRef, boolean failIfAlreadyExists) throws IOException {
         try (InputStream stream = bytesRef.streamInput()) {
-            snapshotsBlobContainer.writeBlobAtomic(blobName, stream, bytesRef.length(), failIfAlreadyExists);
+            blobContainer().writeBlobAtomic(blobName, stream, bytesRef.length(), failIfAlreadyExists);
         }
     }
 
@@ -806,6 +881,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     @Override
     public void verify(String seed, DiscoveryNode localNode) {
+        assertSnapshotOrGenericThread();
         BlobContainer testBlobContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed)));
         if (testBlobContainer.blobExists("master.dat")) {
             try  {
diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
index 4d4ab60feef..643ff2bc93d 100644
--- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
@@ -31,7 +31,6 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.repositories.RepositoryException;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 
-import java.io.IOException;
 import java.nio.file.Path;
 import java.util.function.Function;
 
@@ -61,8 +60,7 @@ public class FsRepository extends BlobStoreRepository {
     public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope);
     public static final Setting REPOSITORIES_COMPRESS_SETTING =
         Setting.boolSetting("repositories.fs.compress", false, Property.NodeScope);
-
-    private final FsBlobStore blobStore;
+    private final Environment environment;
 
     private ByteSizeValue chunkSize;
 
@@ -74,37 +72,45 @@ public class FsRepository extends BlobStoreRepository {
      * Constructs a shared file system repository.
      */
     public FsRepository(RepositoryMetaData metadata, Environment environment,
-                        NamedXContentRegistry namedXContentRegistry) throws IOException {
+                        NamedXContentRegistry namedXContentRegistry) {
         super(metadata, environment.settings(), namedXContentRegistry);
+        this.environment = environment;
         String location = REPOSITORIES_LOCATION_SETTING.get(metadata.settings());
         if (location.isEmpty()) {
-            logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes");
+            logger.warn("the repository location is missing, it should point to a shared file system location"
+                + " that is available on all master and data nodes");
             throw new RepositoryException(metadata.name(), "missing location");
         }
         Path locationFile = environment.resolveRepoFile(location);
         if (locationFile == null) {
             if (environment.repoFiles().length > 0) {
-                logger.warn("The specified location [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] ", location, environment.repoFiles());
-                throw new RepositoryException(metadata.name(), "location [" + location + "] doesn't match any of the locations specified by path.repo");
+                logger.warn("The specified location [{}] doesn't start with any "
+                    + "repository paths specified by the path.repo setting: [{}] ", location, environment.repoFiles());
+                throw new RepositoryException(metadata.name(), "location [" + location
+                    + "] doesn't match any of the locations specified by path.repo");
             } else {
-                logger.warn("The specified location [{}] should start with a repository path specified by the path.repo setting, but the path.repo setting was not set on this node", location);
-                throw new RepositoryException(metadata.name(), "location [" + location + "] doesn't match any of the locations specified by path.repo because this setting is empty");
+                logger.warn("The specified location [{}] should start with a repository path specified by"
+                    + " the path.repo setting, but the path.repo setting was not set on this node", location);
+                throw new RepositoryException(metadata.name(), "location [" + location
+                    + "] doesn't match any of the locations specified by path.repo because this setting is empty");
             }
         }
 
-        blobStore = new FsBlobStore(settings, locationFile);
         if (CHUNK_SIZE_SETTING.exists(metadata.settings())) {
             this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
         } else {
             this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
         }
-        this.compress = COMPRESS_SETTING.exists(metadata.settings()) ? COMPRESS_SETTING.get(metadata.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
+        this.compress = COMPRESS_SETTING.exists(metadata.settings())
+            ? COMPRESS_SETTING.get(metadata.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
         this.basePath = BlobPath.cleanPath();
     }
 
     @Override
-    protected BlobStore blobStore() {
-        return blobStore;
+    protected BlobStore createBlobStore() throws Exception {
+        final String location = REPOSITORIES_LOCATION_SETTING.get(metadata.settings());
+        final Path locationFile = environment.resolveRepoFile(location);
+        return new FsBlobStore(settings, locationFile);
     }
 
     @Override
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
index cc971ed1b04..3a5302bcec2 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
@@ -406,7 +406,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice
             Collections.emptySet());
         final ClusterService clusterService = mock(ClusterService.class);
         final RepositoriesService repositoriesService = new RepositoriesService(settings, clusterService,
-            transportService, null);
+            transportService, null, threadPool);
         final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService(settings, threadPool,
             transportService, null, clusterService);
         final ShardStateAction shardStateAction = mock(ShardStateAction.class);
diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java
index 7a1d3a89420..0eae9a14200 100644
--- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java
@@ -173,10 +173,17 @@ public class BlobStoreRepositoryRestoreTests extends IndexShardTestCase {
     }
 
     /** Create a {@link Repository} with a random name **/
-    private Repository createRepository() throws IOException {
+    private Repository createRepository() {
         Settings settings = Settings.builder().put("location", randomAlphaOfLength(10)).build();
         RepositoryMetaData repositoryMetaData = new RepositoryMetaData(randomAlphaOfLength(10), FsRepository.TYPE, settings);
-        return new FsRepository(repositoryMetaData, createEnvironment(), xContentRegistry());
+        final FsRepository repository = new FsRepository(repositoryMetaData, createEnvironment(), xContentRegistry()) {
+            @Override
+            protected void assertSnapshotOrGenericThread() {
+                // eliminate thread name check as we create repo manually
+            }
+        };
+        repository.start();
+        return repository;
     }
 
     /** Create a {@link Environment} with random path.home and path.repo **/
diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
index 7e4d5cc54a9..1abdb97f174 100644
--- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
@@ -24,10 +24,16 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRes
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.RepositoryPlugin;
 import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.repositories.RepositoryData;
 import org.elasticsearch.repositories.RepositoryException;
+import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.snapshots.SnapshotState;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -37,18 +43,42 @@ import java.io.IOException;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  * Tests for the {@link BlobStoreRepository} and its subclasses.
  */
 public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
 
+    static final String REPO_TYPE = "fsLike";
+
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return Arrays.asList(FsLikeRepoPlugin.class);
+    }
+
+    // the reason for this plug-in is to drop any assertSnapshotOrGenericThread as mostly all access in this test goes from test threads
+    public static class FsLikeRepoPlugin extends org.elasticsearch.plugins.Plugin implements RepositoryPlugin {
+
+        @Override
+        public Map<String, Repository.Factory> getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) {
+            return Collections.singletonMap(REPO_TYPE,
+                (metadata) -> new FsRepository(metadata, env, namedXContentRegistry) {
+                    @Override
+                    protected void assertSnapshotOrGenericThread() {
+                        // eliminate thread name check as we access blobStore on test/main threads
+                    }
+                });
+        }
+    }
+
     public void testRetrieveSnapshots() throws Exception {
         final Client client = client();
         final Path location = ESIntegTestCase.randomRepoPath(node().settings());
@@ -57,7 +87,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
         logger.info("-->  creating repository");
         PutRepositoryResponse putRepositoryResponse =
             client.admin().cluster().preparePutRepository(repositoryName)
-                                    .setType("fs")
+                                    .setType(REPO_TYPE)
                                     .setSettings(Settings.builder().put(node().settings()).put("location", location))
                                     .get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
@@ -209,7 +239,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
 
         PutRepositoryResponse putRepositoryResponse =
             client.admin().cluster().preparePutRepository(repositoryName)
-                                    .setType("fs")
+                                    .setType(REPO_TYPE)
                                     .setSettings(Settings.builder().put(node().settings()).put("location", location))
                                     .get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
@@ -217,6 +247,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
         final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class);
         @SuppressWarnings("unchecked") final BlobStoreRepository repository =
             (BlobStoreRepository) repositoriesService.repository(repositoryName);
+        assertThat("getBlobContainer has to be lazy initialized", repository.getBlobContainer(), nullValue());
         return repository;
     }
 
diff --git a/server/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java b/server/src/test/java/org/elasticsearch/repositories/fs/FsBlobStoreRepositoryIT.java
similarity index 79%
rename from server/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java
rename to server/src/test/java/org/elasticsearch/repositories/fs/FsBlobStoreRepositoryIT.java
index 792b1bdbddd..1ed42cb2474 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java
+++ b/server/src/test/java/org/elasticsearch/repositories/fs/FsBlobStoreRepositoryIT.java
@@ -16,22 +16,29 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.snapshots;
+package org.elasticsearch.repositories.fs;
 
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class FsBlobStoreRepositoryIT extends ESBlobStoreRepositoryIntegTestCase {
     @Override
-    protected void createTestRepository(String name) {
+    protected void createTestRepository(String name, boolean verify) {
         assertAcked(client().admin().cluster().preparePutRepository(name)
+            .setVerify(verify)
             .setType("fs").setSettings(Settings.builder()
                 .put("location", randomRepoPath())
                 .put("compress", randomBoolean())
                 .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
+    }
 
+    @Override
+    protected void afterCreationCheck(Repository repository) {
+        assertThat(repository, instanceOf(FsRepository.class));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index c9ca1637b1a..d2954a4c128 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.snapshots;
 
+import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionFuture;
@@ -93,6 +94,7 @@ import org.elasticsearch.script.MockScriptEngine;
 import org.elasticsearch.script.StoredScriptsIT;
 import org.elasticsearch.snapshots.mockstore.MockRepository;
 import org.elasticsearch.test.junit.annotations.TestLogging;
+import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
 import java.nio.channels.SeekableByteChannel;
@@ -1262,7 +1264,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         RepositoriesService service = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
         Repository repository = service.repository("test-repo");
 
-        final Map<String, IndexId> indexIds = repository.getRepositoryData().getIndices();
+        final Map<String, IndexId> indexIds = getRepositoryData(repository).getIndices();
         final Path indicesPath = repo.resolve("indices");
 
         logger.info("--> delete index metadata and shard metadata");
@@ -1739,6 +1741,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
 
         logger.info("--> trying to create a repository with different name");
         assertAcked(client.admin().cluster().preparePutRepository("test-repo-2")
+                .setVerify(false) // do not do verification itself as snapshot threads could be fully blocked
                 .setType("fs").setSettings(Settings.builder().put("location", repositoryLocation.resolve("test"))));
 
         logger.info("--> unblocking blocked node");
@@ -2563,7 +2566,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
 
         logger.info("--> emulate an orphan snapshot");
         RepositoriesService repositoriesService = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
-        final RepositoryData repositoryData = repositoriesService.repository(repositoryName).getRepositoryData();
+        final RepositoryData repositoryData = getRepositoryData(repositoriesService.repository(repositoryName));
         final IndexId indexId = repositoryData.resolveIndexId(idxName);
 
         clusterService.submitStateUpdateTask("orphan snapshot test", new ClusterStateUpdateTask() {
@@ -2784,7 +2787,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         RepositoriesService service = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
         Repository repository = service.repository("test-repo");
 
-        final Map<String, IndexId> indexIds = repository.getRepositoryData().getIndices();
+        final RepositoryData repositoryData = getRepositoryData(repository);
+        final Map<String, IndexId> indexIds = repositoryData.getIndices();
         assertThat(indexIds.size(), equalTo(nbIndices));
 
         // Choose a random index from the snapshot
@@ -3445,6 +3449,19 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         }
     }
 
+    private RepositoryData getRepositoryData(Repository repository) throws InterruptedException {
+        ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName());
+        final SetOnce<RepositoryData> repositoryData = new SetOnce<>();
+        final CountDownLatch latch = new CountDownLatch(1);
+        threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
+            repositoryData.set(repository.getRepositoryData());
+            latch.countDown();
+        });
+
+        latch.await();
+        return repositoryData.get();
+    }
+
     private void verifySnapshotInfo(final GetSnapshotsResponse response, final Map<String, List<String>> indicesPerSnapshot) {
         for (SnapshotInfo snapshotInfo : response.getSnapshots()) {
             final List<String> expected = snapshotInfo.indices();
diff --git a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java
index d05a10905d8..75a86831bc5 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java
@@ -92,8 +92,6 @@ public class MockRepository extends FsRepository {
 
     private final long waitAfterUnblock;
 
-    private final MockBlobStore mockBlobStore;
-
     private final String randomPrefix;
 
     private volatile boolean blockOnInitialization;
@@ -128,7 +126,6 @@ public class MockRepository extends FsRepository {
         waitAfterUnblock = metadata.settings().getAsLong("wait_after_unblock", 0L);
         allowAtomicOperations = metadata.settings().getAsBoolean("allow_atomic_operations", true);
         logger.info("starting mock repository with random prefix {}", randomPrefix);
-        mockBlobStore = new MockBlobStore(super.blobStore());
     }
 
     @Override
@@ -163,8 +160,8 @@ public class MockRepository extends FsRepository {
     }
 
     @Override
-    protected BlobStore blobStore() {
-        return mockBlobStore;
+    protected BlobStore createBlobStore() throws Exception {
+        return new MockBlobStore(super.createBlobStore());
     }
 
     public synchronized void unblock() {
@@ -195,7 +192,7 @@ public class MockRepository extends FsRepository {
     }
 
     private synchronized boolean blockExecution() {
-        logger.debug("Blocking execution");
+        logger.debug("[{}] Blocking execution", metadata.name());
         boolean wasBlocked = false;
         try {
             while (blockOnDataFiles || blockOnControlFiles || blockOnInitialization || blockOnWriteIndexFile ||
@@ -207,7 +204,7 @@ public class MockRepository extends FsRepository {
         } catch (InterruptedException ex) {
             Thread.currentThread().interrupt();
         }
-        logger.debug("Unblocking execution");
+        logger.debug("[{}] Unblocking execution", metadata.name());
         return wasBlocked;
     }
 
@@ -285,7 +282,7 @@ public class MockRepository extends FsRepository {
             }
 
             private void blockExecutionAndMaybeWait(final String blobName) {
-                logger.info("blocking I/O operation for file [{}] at path [{}]", blobName, path());
+                logger.info("[{}] blocking I/O operation for file [{}] at path [{}]", metadata.name(), blobName, path());
                 if (blockExecution() && waitAfterUnblock > 0) {
                     try {
                         // Delay operation after unblocking
diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java
index bf9c8193234..439728bac9e 100644
--- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.repositories.blobstore;
 
+import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
@@ -27,34 +28,61 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.repositories.RepositoryData;
 import org.elasticsearch.snapshots.SnapshotMissingException;
 import org.elasticsearch.snapshots.SnapshotRestoreException;
 import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  * Basic integration tests for blob-based repository validation.
  */
 public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase {
 
-    protected abstract void createTestRepository(String name);
+    protected abstract void createTestRepository(String name, boolean verify);
+
+    protected void afterCreationCheck(Repository repository) {
+
+    }
+
+    protected void createAndCheckTestRepository(String name) {
+        final boolean verify = randomBoolean();
+        createTestRepository(name, verify);
+
+        final Iterable repositoriesServices =
+            internalCluster().getDataOrMasterNodeInstances(RepositoriesService.class);
+
+        for (RepositoriesService repositoriesService : repositoriesServices) {
+            final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(name);
+
+            afterCreationCheck(repository);
+            assertThat("blob store has to be lazy initialized",
+                repository.getBlobStore(), verify ? is(notNullValue()) : is(nullValue()));
+        }
+
+    }
 
     public void testSnapshotAndRestore() throws Exception {
         final String repoName = randomAsciiName();
         logger.info("-->  creating repository {}", repoName);
-        createTestRepository(repoName);
+        createAndCheckTestRepository(repoName);
         int indexCount = randomIntBetween(1, 5);
         int[] docCounts = new int[indexCount];
         String[] indexNames = generateRandomNames(indexCount);
@@ -125,7 +153,7 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase
     public void testMultipleSnapshotAndRollback() throws Exception {
         String repoName = randomAsciiName();
         logger.info("-->  creating repository {}", repoName);
-        createTestRepository(repoName);
+        createAndCheckTestRepository(repoName);
         int iterationCount = randomIntBetween(2, 5);
         int[] docCounts = new int[iterationCount];
         String indexName = randomAsciiName();
@@ -177,12 +205,12 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase
         }
     }
 
-    public void testIndicesDeletedFromRepository() {
+    public void testIndicesDeletedFromRepository() throws Exception {
         Client client = client();
 
         logger.info("-->  creating repository");
         final String repoName = "test-repo";
-        createTestRepository(repoName);
+        createAndCheckTestRepository(repoName);
 
         createIndex("test-idx-1", "test-idx-2", "test-idx-3");
         ensureGreen();
@@ -219,12 +247,22 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase
 
         logger.info("--> verify index folder deleted from blob container");
         RepositoriesService repositoriesSvc = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
+        ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName());
         @SuppressWarnings("unchecked") BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName);
-        BlobContainer indicesBlobContainer = repository.blobStore().blobContainer(repository.basePath().add("indices"));
-        RepositoryData repositoryData = repository.getRepositoryData();
-        for (IndexId indexId : repositoryData.getIndices().values()) {
+
+        final SetOnce<BlobContainer> indicesBlobContainer = new SetOnce<>();
+        final SetOnce<RepositoryData> repositoryData = new SetOnce<>();
+        final CountDownLatch latch = new CountDownLatch(1);
+        threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
+            indicesBlobContainer.set(repository.blobStore().blobContainer(repository.basePath().add("indices")));
+            repositoryData.set(repository.getRepositoryData());
+            latch.countDown();
+        });
+
+        latch.await();
+        for (IndexId indexId : repositoryData.get().getIndices().values()) {
             if (indexId.getName().equals("test-idx-3")) {
-                assertFalse(indicesBlobContainer.blobExists(indexId.getId())); // deleted index
+                assertFalse(indicesBlobContainer.get().blobExists(indexId.getId())); // deleted index
             }
         }
     }

From 1f72afa77314d27208e710398f65f701c865ea1f Mon Sep 17 00:00:00 2001
From: Michael Basnight 
Date: Fri, 13 Jul 2018 13:49:47 -0500
Subject: [PATCH 015/260] Watcher: Make settings reloadable (#31746)

This commit allows for rebuilding watcher secure secrets via the
reload_secure_settings API call. The commit also renames a method in the
Notification Service to make it a bit more readable.
---
 .../elasticsearch/xpack/watcher/Watcher.java  | 22 ++++++++++-
 .../notification/NotificationService.java     |  4 +-
 .../notification/email/EmailService.java      |  2 +-
 .../notification/hipchat/HipChatService.java  |  6 +--
 .../notification/jira/JiraService.java        |  2 +-
 .../pagerduty/PagerDutyService.java           |  2 +-
 .../notification/slack/SlackService.java      |  2 +-
 .../xpack/watcher/WatcherPluginTests.java     | 37 +++++++++++++++++++
 .../NotificationServiceTests.java             |  2 +-
 9 files changed, 68 insertions(+), 11 deletions(-)

diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
index f7d51d328a7..78d1d37287f 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
@@ -38,6 +38,7 @@ import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.ReloadablePlugin;
 import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
@@ -123,6 +124,7 @@ import org.elasticsearch.xpack.watcher.input.simple.SimpleInput;
 import org.elasticsearch.xpack.watcher.input.simple.SimpleInputFactory;
 import org.elasticsearch.xpack.watcher.input.transform.TransformInput;
 import org.elasticsearch.xpack.watcher.input.transform.TransformInputFactory;
+import org.elasticsearch.xpack.watcher.notification.NotificationService;
 import org.elasticsearch.xpack.watcher.notification.email.Account;
 import org.elasticsearch.xpack.watcher.notification.email.EmailService;
 import org.elasticsearch.xpack.watcher.notification.email.HtmlSanitizer;
@@ -194,7 +196,7 @@ import java.util.function.UnaryOperator;
 
 import static java.util.Collections.emptyList;
 
-public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
+public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, ReloadablePlugin {
 
     // This setting is only here for backward compatibility reasons as 6.x indices made use of it. It can be removed in 8.x.
     @Deprecated
@@ -221,6 +223,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
     protected final boolean transportClient;
     protected final boolean enabled;
     protected final Environment env;
+    protected List<NotificationService> reloadableServices = new ArrayList<>();
 
     public Watcher(final Settings settings) {
         this.settings = settings;
@@ -275,6 +278,12 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
         SlackService slackService = new SlackService(settings, httpClient, clusterService.getClusterSettings());
         PagerDutyService pagerDutyService = new PagerDutyService(settings, httpClient, clusterService.getClusterSettings());
 
+        reloadableServices.add(emailService);
+        reloadableServices.add(hipChatService);
+        reloadableServices.add(jiraService);
+        reloadableServices.add(slackService);
+        reloadableServices.add(pagerDutyService);
+
         TextTemplateEngine templateEngine = new TextTemplateEngine(settings, scriptService);
         Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>();
         emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, httpTemplateParser,
@@ -613,4 +622,15 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
     public void close() throws IOException {
         IOUtils.closeWhileHandlingException(httpClient);
     }
+
+    /**
+     * Reloads all the reloadable services in watcher.
+     */
+    @Override
+    public void reload(Settings settings) {
+        if (enabled == false || transportClient) {
+            return;
+        }
+        reloadableServices.forEach(s -> s.reload(settings));
+    }
 }
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java
index 88399d3cb93..027825ab778 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java
@@ -31,7 +31,7 @@ public abstract class NotificationService extends AbstractComponent {
     public NotificationService(Settings settings, String type,
                                ClusterSettings clusterSettings, List<Setting<?>> pluginSettings) {
         this(settings, type);
-        clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, pluginSettings);
+        clusterSettings.addSettingsUpdateConsumer(this::reload, pluginSettings);
     }
 
     // Used for testing only
@@ -40,7 +40,7 @@ public abstract class NotificationService extends AbstractComponent {
         this.type = type;
     }
 
-    protected synchronized void setAccountSetting(Settings settings) {
+    public synchronized void reload(Settings settings) {
         Tuple<Map<String, Account>, Account> accounts = buildAccounts(settings, this::createAccount);
         this.accounts = Collections.unmodifiableMap(accounts.v1());
         this.defaultAccount = accounts.v2();
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
index 15859a5e044..e45ed55cee3 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
@@ -127,7 +127,7 @@ public class EmailService extends NotificationService {
         clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SEND_PARTIAL, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WAIT_ON_QUIT, (s, o) -> {}, (s, o) -> {});
         // do an initial load
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
index ca970d5597b..2f21c2299a9 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
@@ -79,13 +79,13 @@ public class HipChatService extends NotificationService {
         clusterSettings.addAffixUpdateConsumer(SETTING_PORT, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_MESSAGE_DEFAULTS, (s, o) -> {}, (s, o) -> {});
 
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
-    protected synchronized void setAccountSetting(Settings settings) {
+    public synchronized void reload(Settings settings) {
         defaultServer = new HipChatServer(settings.getByPrefix("xpack.notification.hipchat."));
-        super.setAccountSetting(settings);
+        super.reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
index 3ccff775051..49c05f36b24 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
@@ -75,7 +75,7 @@ public class JiraService extends NotificationService {
         clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_PASSWORD, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
         // do an initial load
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
index 21c2f1fefb1..32a6dcb91aa 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
@@ -46,7 +46,7 @@ public class PagerDutyService extends NotificationService {
         clusterSettings.addAffixUpdateConsumer(SETTING_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
index d648501a5f8..2a38e08d599 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
@@ -46,7 +46,7 @@ public class SlackService extends NotificationService {
         clusterSettings.addAffixUpdateConsumer(SETTING_URL, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_URL_SECURE, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
index abb981053e7..474f69c70ed 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
@@ -15,6 +15,7 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
+import org.elasticsearch.xpack.watcher.notification.NotificationService;
 
 import java.util.List;
 
@@ -22,6 +23,10 @@ import static java.util.Collections.emptyMap;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
 
 public class WatcherPluginTests extends ESTestCase {
 
@@ -97,4 +102,36 @@ public class WatcherPluginTests extends ESTestCase {
                 .build();
         assertThat(Watcher.getWatcherThreadPoolSize(noDataNodeSettings), is(1));
     }
+
+    public void testReload() {
+        Settings settings = Settings.builder()
+            .put("xpack.watcher.enabled", true)
+            .put("path.home", createTempDir())
+            .build();
+        NotificationService mockService = mock(NotificationService.class);
+        Watcher watcher = new TestWatcher(settings, mockService);
+
+        watcher.reload(settings);
+        verify(mockService, times(1)).reload(settings);
+    }
+
+    public void testReloadDisabled() {
+        Settings settings = Settings.builder()
+            .put("xpack.watcher.enabled", false)
+            .put("path.home", createTempDir())
+            .build();
+        NotificationService mockService = mock(NotificationService.class);
+        Watcher watcher = new TestWatcher(settings, mockService);
+
+        watcher.reload(settings);
+        verifyNoMoreInteractions(mockService);
+    }
+
+    private class TestWatcher extends Watcher {
+
+        TestWatcher(Settings settings, NotificationService service) {
+            super(settings);
+            reloadableServices.add(service);
+        }
+    }
 }
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
index 829337e9acb..cb86913678a 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
@@ -82,7 +82,7 @@ public class NotificationServiceTests extends ESTestCase {
 
         TestNotificationService(Settings settings) {
             super(settings, "test");
-            setAccountSetting(settings);
+            reload(settings);
         }
 
         @Override

From feb07559aadaa3175d8d7d5401c1a55438ee6451 Mon Sep 17 00:00:00 2001
From: Paul Sanwald 
Date: Fri, 13 Jul 2018 14:59:11 -0400
Subject: [PATCH 016/260] fix typo

---
 .../aggregations/bucket/autodatehistogram-aggregation.asciidoc  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
index 28cb65ce6cc..3bd430d03d5 100644
--- a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
@@ -215,7 +215,7 @@ the specified time zone.
     "by_day": {
       "buckets": [
         {
-          "key_as_string": "2015-09-30T23:00:00.000-01:00",
+          "key_as_string": "2015-09-30T23:00:00.000-01:00", <1>
           "key": 1443657600000,
           "doc_count": 1
         },

From 42ca520377098edcbd851efa640259b6a81371a5 Mon Sep 17 00:00:00 2001
From: Jack Conradson 
Date: Fri, 13 Jul 2018 13:07:26 -0700
Subject: [PATCH 017/260] Clean Up Snapshot Create Rest API (#31779)

Make SnapshotInfo and CreateSnapshotResponse parsers lenient for backwards compatibility.  Remove extraneous fields from CreateSnapshotRequest toXContent.
---
 .../elasticsearch/client/SnapshotClient.java  |  4 +-
 .../org/elasticsearch/client/SnapshotIT.java  |  4 +-
 .../SnapshotClientDocumentationIT.java        | 10 +++-
 .../snapshot/create_snapshot.asciidoc         | 11 ++++
 .../create/CreateSnapshotRequest.java         |  4 +-
 .../create/CreateSnapshotResponse.java        | 50 ++++++-------------
 .../action/support/IndicesOptions.java        | 28 +++++------
 .../elasticsearch/snapshots/SnapshotInfo.java | 25 ----------
 .../create/CreateSnapshotRequestTests.java    |  4 +-
 .../create/CreateSnapshotResponseTests.java   |  6 +--
 .../action/support/IndicesOptionsTests.java   |  3 --
 11 files changed, 56 insertions(+), 93 deletions(-)

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
index bc0bbe95488..f75f6cdef24 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
@@ -176,7 +176,7 @@ public final class SnapshotClient {
      * See  Snapshot and Restore
      * API on elastic.co
      */
-    public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
+    public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
         throws IOException {
         return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
             CreateSnapshotResponse::fromXContent, emptySet());
@@ -188,7 +188,7 @@ public final class SnapshotClient {
      * See  Snapshot and Restore
      * API on elastic.co
      */
-    public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
+    public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
                                     ActionListener listener) {
         restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
             CreateSnapshotResponse::fromXContent, listener, emptySet());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
index 45f9b5bbb0b..6d035f5db65 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
@@ -61,8 +61,8 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
     private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
         // assumes the repository already exists
 
-        return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot,
-            highLevelClient().snapshot()::createSnapshotAsync);
+        return execute(createSnapshotRequest, highLevelClient().snapshot()::create,
+            highLevelClient().snapshot()::createAsync);
     }
 
     public void testCreateRepository() throws IOException {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
index 403ebc7d774..68a8113af6d 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -425,7 +425,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         // end::create-snapshot-request-waitForCompletion
 
         // tag::create-snapshot-execute
-        CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT);
+        CreateSnapshotResponse response = client.snapshot().create(request, RequestOptions.DEFAULT);
         // end::create-snapshot-execute
 
         // tag::create-snapshot-response
@@ -433,6 +433,12 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         // end::create-snapshot-response
 
         assertEquals(RestStatus.OK, status);
+
+        // tag::create-snapshot-response-snapshot-info
+        SnapshotInfo snapshotInfo = response.getSnapshotInfo(); // <1>
+        // end::create-snapshot-response-snapshot-info
+
+        assertNotNull(snapshotInfo);
     }
 
     public void testSnapshotCreateAsync() throws InterruptedException {
@@ -460,7 +466,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
             listener = new LatchedActionListener<>(listener, latch);
 
             // tag::create-snapshot-execute-async
-            client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            client.snapshot().createAsync(request, RequestOptions.DEFAULT, listener); // <1>
             // end::create-snapshot-execute-async
 
             assertTrue(latch.await(30L, TimeUnit.SECONDS));
diff --git a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc
index dbd31380a9b..971a6ee4867 100644
--- a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc
+++ b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc
@@ -73,11 +73,22 @@ include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-r
 [[java-rest-high-snapshot-create-snapshot-sync]]
 ==== Synchronous Execution
 
+Execute a `CreateSnapshotRequest` synchronously to receive a `CreateSnapshotResponse`.
+
 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
 include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute]
 --------------------------------------------------
 
+Retrieve the `SnapshotInfo` from a `CreateSnapshotResponse` when the snapshot is fully created.
+(The `waitForCompletion` parameter is `true`).
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response-snapshot-info]
+--------------------------------------------------
+<1> The `SnapshotInfo` object.
+
 [[java-rest-high-snapshot-create-snapshot-async]]
 ==== Asynchronous Execution
 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
index 2ff01ab01ed..15fbac35bff 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
@@ -42,9 +42,9 @@ import java.util.Objects;
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.common.Strings.EMPTY_ARRAY;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 
 /**
@@ -433,8 +433,6 @@ public class CreateSnapshotRequest extends MasterNodeRequest PARSER =
+        new ObjectParser<>(CreateSnapshotResponse.class.getName(), true, CreateSnapshotResponse::new);
+
+    static {
+        PARSER.declareObject(CreateSnapshotResponse::setSnapshotInfoFromBuilder,
+            SnapshotInfo.SNAPSHOT_INFO_PARSER, new ParseField("snapshot"));
+    }
+
     @Nullable
     private SnapshotInfo snapshotInfo;
 
@@ -48,8 +58,8 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
     CreateSnapshotResponse() {
     }
 
-    void setSnapshotInfo(SnapshotInfo snapshotInfo) {
-        this.snapshotInfo = snapshotInfo;
+    private void setSnapshotInfoFromBuilder(SnapshotInfoBuilder snapshotInfoBuilder) {
+        this.snapshotInfo = snapshotInfoBuilder.build();
     }
 
     /**
@@ -101,38 +111,8 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
         return builder;
     }
 
-    public static CreateSnapshotResponse fromXContent(XContentParser parser) throws IOException {
-        CreateSnapshotResponse createSnapshotResponse = new CreateSnapshotResponse();
-
-        parser.nextToken(); // move to '{'
-
-        if (parser.currentToken() != Token.START_OBJECT) {
-            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']");
-        }
-
-        parser.nextToken(); // move to 'snapshot' || 'accepted'
-
-        if ("snapshot".equals(parser.currentName())) {
-            createSnapshotResponse.snapshotInfo = SnapshotInfo.fromXContent(parser);
-        } else if ("accepted".equals(parser.currentName())) {
-            parser.nextToken(); // move to 'accepted' field value
-
-            if (parser.booleanValue()) {
-                // ensure accepted is a boolean value
-            }
-
-            parser.nextToken(); // move past 'true'/'false'
-        } else {
-            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] expected ['snapshot', 'accepted']");
-        }
-
-        if (parser.currentToken() != Token.END_OBJECT) {
-            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']");
-        }
-
-        parser.nextToken(); // move past '}'
-
-        return createSnapshotResponse;
+    public static CreateSnapshotResponse fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
index 19572a6c212..f2cf0b5444d 100644
--- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
+++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
@@ -22,6 +22,7 @@ package org.elasticsearch.action.support;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestRequest;
@@ -316,21 +317,6 @@ public class IndicesOptions implements ToXContentFragment {
                 defaultSettings);
     }
 
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startArray("expand_wildcards");
-        for (WildcardStates expandWildcard : expandWildcards) {
-            builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT));
-        }
-        builder.endArray();
-        builder.field("ignore_unavailable", ignoreUnavailable());
-        builder.field("allow_no_indices", allowNoIndices());
-        builder.field("forbid_aliases_to_multiple_indices", allowAliasesToMultipleIndices() == false);
-        builder.field("forbid_closed_indices", forbidClosedIndices());
-        builder.field("ignore_aliases", ignoreAliases());
-        return builder;
-    }
-
     /**
      * Returns true if the name represents a valid name for one of the indices option
      * false otherwise
@@ -360,6 +346,18 @@ public class IndicesOptions implements ToXContentFragment {
         );
     }
 
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+        builder.startArray("expand_wildcards");
+        for (WildcardStates expandWildcard : expandWildcards) {
+            builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT));
+        }
+        builder.endArray();
+        builder.field("ignore_unavailable", ignoreUnavailable());
+        builder.field("allow_no_indices", allowNoIndices());
+        return builder;
+    }
+
     /**
      * @return indices options that requires every specified index to exist, expands wildcards only to open indices and
      *         allows that no indices are resolved from wildcard expressions (not returning an error).
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
index cf2f66a750c..67ddabc37fa 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
@@ -140,22 +140,6 @@ public final class SnapshotInfo implements Comparable, ToXContent,
             this.shardFailures = shardFailures;
         }
 
-        private void ignoreVersion(String version) {
-            // ignore extra field
-        }
-
-        private void ignoreStartTime(String startTime) {
-            // ignore extra field
-        }
-
-        private void ignoreEndTime(String endTime) {
-            // ignore extra field
-        }
-
-        private void ignoreDurationInMillis(long durationInMillis) {
-            // ignore extra field
-        }
-
         public SnapshotInfo build() {
             SnapshotId snapshotId = new SnapshotId(snapshotName, snapshotUUID);
 
@@ -197,10 +181,6 @@ public final class SnapshotInfo implements Comparable, ToXContent,
         int getSuccessfulShards() {
             return successfulShards;
         }
-
-        private void ignoreFailedShards(int failedShards) {
-            // ignore extra field
-        }
     }
 
     public static final ObjectParser SNAPSHOT_INFO_PARSER =
@@ -222,14 +202,9 @@ public final class SnapshotInfo implements Comparable, ToXContent,
         SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID));
         SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setShardFailures, SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER,
             new ParseField(FAILURES));
-        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreVersion, new ParseField(VERSION));
-        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreStartTime, new ParseField(START_TIME));
-        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreEndTime, new ParseField(END_TIME));
-        SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::ignoreDurationInMillis, new ParseField(DURATION_IN_MILLIS));
 
         SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL));
         SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL));
-        SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::ignoreFailedShards, new ParseField(FAILED));
     }
 
     private final SnapshotId snapshotId;
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java
index 1bde8ab572b..0b598be6849 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java
@@ -102,8 +102,8 @@ public class CreateSnapshotRequestTests extends ESTestCase {
                 NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput());
         Map map = parser.mapOrdered();
         CreateSnapshotRequest processed = new CreateSnapshotRequest((String)map.get("repository"), (String)map.get("snapshot"));
-        processed.waitForCompletion((boolean)map.getOrDefault("wait_for_completion", false));
-        processed.masterNodeTimeout((String)map.getOrDefault("master_node_timeout", "30s"));
+        processed.waitForCompletion(original.waitForCompletion());
+        processed.masterNodeTimeout(original.masterNodeTimeout());
         processed.source(map);
 
         assertEquals(original, processed);
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java
index bbfc9755bf2..bbb11fc6fee 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java
@@ -40,7 +40,7 @@ public class CreateSnapshotResponseTests extends AbstractXContentTestCase
Date: Fri, 13 Jul 2018 16:10:39 -0400
Subject: [PATCH 018/260] [Rollup] Histo group config should support
 scaled_floats (#32048)

Metric config already whitelists scaled_floats, but it wasn't added to
the histo group config.  This centralizes the mapping types map
so that both metrics and histo (and any future configs) use the same
map.

Fixes #32035
---
 .../xpack/core/rollup/RollupField.java              | 13 +++++++++++++
 .../xpack/core/rollup/job/HistoGroupConfig.java     |  8 +-------
 .../xpack/core/rollup/job/MetricConfig.java         | 13 +------------
 .../job/HistoGroupConfigSerializingTests.java       |  4 +++-
 4 files changed, 18 insertions(+), 20 deletions(-)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
index 1e2e011276d..134ce6c87b3 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.core.rollup;
 
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
@@ -15,6 +16,8 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuil
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 public class RollupField {
     // Fields that are used both in core Rollup actions and Rollup plugin
@@ -34,6 +37,16 @@ public class RollupField {
     public static final List SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
             SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
 
+    // these mapper types are used by the configs (metric, histo, etc) to validate field mappings
+    public static final List NUMERIC_FIELD_MAPPER_TYPES;
+    static {
+        List types = Stream.of(NumberFieldMapper.NumberType.values())
+            .map(NumberFieldMapper.NumberType::typeName)
+            .collect(Collectors.toList());
+        types.add("scaled_float"); // have to add manually since scaled_float is in a module
+        NUMERIC_FIELD_MAPPER_TYPES = types;
+    }
+
     /**
      * Format to the appropriate Rollup field name convention
      *
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java
index 2b1511077d9..87de9e16534 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java
@@ -15,7 +15,6 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
@@ -30,7 +29,6 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 /**
  * The configuration object for the histograms in the rollup config
@@ -51,10 +49,6 @@ public class HistoGroupConfig implements Writeable, ToXContentFragment {
 
     private static final ParseField INTERVAL = new ParseField("interval");
     private static final ParseField FIELDS = new ParseField("fields");
-    private static final List MAPPER_TYPES = Stream.of(NumberFieldMapper.NumberType.values())
-            .map(NumberFieldMapper.NumberType::typeName)
-            .collect(Collectors.toList());
-
 
     private final long interval;
     private final String[] fields;
@@ -126,7 +120,7 @@ public class HistoGroupConfig implements Writeable, ToXContentFragment {
             Map fieldCaps = fieldCapsResponse.get(field);
             if (fieldCaps != null && fieldCaps.isEmpty() == false) {
                 fieldCaps.forEach((key, value) -> {
-                    if (MAPPER_TYPES.contains(key)) {
+                    if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) {
                         if (value.isAggregatable() == false) {
                             validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
                                     "but is not.");
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
index 67b83646c42..006d8c35c32 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
@@ -15,7 +15,6 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
@@ -32,7 +31,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 /**
  * The configuration object for the metrics portion of a rollup job config
@@ -66,15 +64,6 @@ public class MetricConfig implements Writeable, ToXContentFragment {
     private static final ParseField AVG = new ParseField("avg");
     private static final ParseField VALUE_COUNT = new ParseField("value_count");
 
-    private static final List MAPPER_TYPES;
-    static {
-        List types = Stream.of(NumberFieldMapper.NumberType.values())
-                .map(NumberFieldMapper.NumberType::typeName)
-                .collect(Collectors.toList());
-        types.add("scaled_float"); // have to add manually since scaled_float is in a module
-        MAPPER_TYPES = types;
-    }
-
     public static final ObjectParser PARSER = new ObjectParser<>(NAME, MetricConfig.Builder::new);
 
     static {
@@ -153,7 +142,7 @@ public class MetricConfig implements Writeable, ToXContentFragment {
         Map fieldCaps = fieldCapsResponse.get(field);
         if (fieldCaps != null && fieldCaps.isEmpty() == false) {
             fieldCaps.forEach((key, value) -> {
-                if (MAPPER_TYPES.contains(key)) {
+                if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) {
                     if (value.isAggregatable() == false) {
                         validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
                                 "but is not.");
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java
index 18a64bc2adf..92e7d8b9643 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java
@@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
+import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -111,7 +112,8 @@ public class HistoGroupConfigSerializingTests extends AbstractSerializingTestCas
         // Have to mock fieldcaps because the ctor's aren't public...
         FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
         when(fieldCaps.isAggregatable()).thenReturn(true);
-        responseMap.put("my_field", Collections.singletonMap("long", fieldCaps));
+        String mappingType = randomFrom(RollupField.NUMERIC_FIELD_MAPPER_TYPES);
+        responseMap.put("my_field", Collections.singletonMap(mappingType, fieldCaps));
 
         HistoGroupConfig config = new HistoGroupConfig.Builder()
                 .setFields(Collections.singletonList("my_field"))

From 466235288e8fc74d265fbf6f98d044572a60a460 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?= 
Date: Fri, 13 Jul 2018 23:08:18 +0200
Subject: [PATCH 019/260] Mute failing tests

Relates to #32055
---
 .../test/search.inner_hits/10_basic.yml         |  9 +++++----
 .../test/search/110_field_collapsing.yml        | 17 ++++++++++++++---
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
index 884a50507c7..8f162ae2eb2 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
@@ -13,8 +13,9 @@ setup:
 ---
 "Nested inner hits":
     - skip:
-        version: " - 6.1.99"
-        reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
+        version: "all"
+        reason: "https://github.com/elastic/elasticsearch/issues/32055"
+
     - do:
         index:
           index: test
@@ -45,8 +46,8 @@ setup:
 "Nested doc version and seqIDs":
 
     - skip:
-        version: " - 6.3.99"
-        reason:  "object notation for docvalue_fields was introduced in 6.4"
+        version: "all"
+        reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
     - do:
         index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
index 2dfd868d66b..39597b1fbbe 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -107,6 +107,9 @@ setup:
 
 ---
 "field collapsing and inner_hits":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -146,6 +149,9 @@ setup:
 
 ---
 "field collapsing, inner_hits and maxConcurrentGroupRequests":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -226,6 +232,9 @@ setup:
 
 ---
 "no hits and inner_hits":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -240,6 +249,9 @@ setup:
 
 ---
 "field collapsing and multiple inner_hits":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -292,10 +304,9 @@ setup:
 
 ---
 "field collapsing, inner_hits and version":
-
   - skip:
-      version: " - 6.1.0"
-      reason:  "bug fixed in 6.1.1"
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:

From 3679d00a7465c9d8602e991be76390bd905fddd7 Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Fri, 13 Jul 2018 23:26:10 +0200
Subject: [PATCH 020/260] Replace Ingest ScriptContext with Custom Interface
 (#32003)

* Replace Ingest ScriptContext with Custom Interface
* Make org.elasticsearch.ingest.common.ScriptProcessorTests#testScripting more precise
* Don't mock script factory in ScriptProcessorTests
* Adjust mock script plugin in IT for new API
---
 .../ingest/common/ScriptProcessor.java        | 10 ++--
 .../ingest/common/IngestRestartIT.java        |  4 +-
 .../ingest/common/ScriptProcessorTests.java   | 38 ++++++++------
 .../script/ExecutableScript.java              |  1 -
 .../elasticsearch/script/IngestScript.java    | 52 +++++++++++++++++++
 .../elasticsearch/script/ScriptModule.java    |  2 +-
 .../script/ScriptServiceTests.java            |  8 +--
 .../script/MockScriptEngine.java              |  8 +++
 8 files changed, 91 insertions(+), 32 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/script/IngestScript.java

diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
index ddb284b9c89..74c68fd5c26 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
@@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.ingest.AbstractProcessor;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.Processor;
-import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.IngestScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.ScriptService;
@@ -73,10 +73,8 @@ public final class ScriptProcessor extends AbstractProcessor {
      */
     @Override
     public void execute(IngestDocument document) {
-        ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.INGEST_CONTEXT);
-        ExecutableScript executableScript = factory.newInstance(script.getParams());
-        executableScript.setNextVar("ctx",  document.getSourceAndMetadata());
-        executableScript.run();
+        IngestScript.Factory factory = scriptService.compile(script, IngestScript.CONTEXT);
+        factory.newInstance(script.getParams()).execute(document.getSourceAndMetadata());
     }
 
     @Override
@@ -108,7 +106,7 @@ public final class ScriptProcessor extends AbstractProcessor {
 
                 // verify script is able to be compiled before successfully creating processor.
                 try {
-                    scriptService.compile(script, ExecutableScript.INGEST_CONTEXT);
+                    scriptService.compile(script, IngestScript.CONTEXT);
                 } catch (ScriptException e) {
                     throw newConfigurationException(TYPE, processorTag, null, e);
                 }
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java
index 69236144007..8c3976d2b17 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java
@@ -58,9 +58,7 @@ public class IngestRestartIT extends ESIntegTestCase {
     public static class CustomScriptPlugin extends MockScriptPlugin {
         @Override
         protected Map, Object>> pluginScripts() {
-            return Collections.singletonMap("my_script", script -> {
-                @SuppressWarnings("unchecked")
-                Map ctx = (Map) script.get("ctx");
+            return Collections.singletonMap("my_script", ctx -> {
                 ctx.put("z", 0);
                 return null;
             });
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
index 1004a41bcc5..72bc337e9c9 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
@@ -19,22 +19,22 @@
 
 package org.elasticsearch.ingest.common;
 
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.RandomDocumentPicks;
-import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.MockScriptEngine;
 import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.test.ESTestCase;
 
 import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.core.Is.is;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class ScriptProcessorTests extends ESTestCase {
 
@@ -42,24 +42,28 @@ public class ScriptProcessorTests extends ESTestCase {
         int randomBytesIn = randomInt();
         int randomBytesOut = randomInt();
         int randomBytesTotal = randomBytesIn + randomBytesOut;
-
-        ScriptService scriptService = mock(ScriptService.class);
-        Script script = mockScript("_script");
-        ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class);
-        ExecutableScript executableScript = mock(ExecutableScript.class);
-        when(scriptService.compile(script, ExecutableScript.INGEST_CONTEXT)).thenReturn(factory);
-        when(factory.newInstance(any())).thenReturn(executableScript);
+        String scriptName = "script";
+        ScriptService scriptService = new ScriptService(Settings.builder().build(),
+            Collections.singletonMap(
+                Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(
+                    Script.DEFAULT_SCRIPT_LANG,
+                    Collections.singletonMap(
+                        scriptName, ctx -> {
+                            ctx.put("bytes_total", randomBytesTotal);
+                            return null;
+                        }
+                    )
+                )
+            ),
+            new HashMap<>(ScriptModule.CORE_CONTEXTS)
+        );
+        Script script = new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap());
 
         Map document = new HashMap<>();
         document.put("bytes_in", randomInt());
         document.put("bytes_out", randomInt());
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
 
-        doAnswer(invocationOnMock ->  {
-            ingestDocument.setFieldValue("bytes_total", randomBytesTotal);
-            return null;
-        }).when(executableScript).run();
-
         ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), script, scriptService);
 
         processor.execute(ingestDocument);
diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java
index e87b7cdf389..2f7a01c3798 100644
--- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java
+++ b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java
@@ -50,5 +50,4 @@ public interface ExecutableScript {
     // TODO: remove these once each has its own script interface
     ScriptContext AGGS_CONTEXT = new ScriptContext<>("aggs_executable", Factory.class);
     ScriptContext UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class);
-    ScriptContext INGEST_CONTEXT = new ScriptContext<>("ingest", Factory.class);
 }
diff --git a/server/src/main/java/org/elasticsearch/script/IngestScript.java b/server/src/main/java/org/elasticsearch/script/IngestScript.java
new file mode 100644
index 00000000000..f357394ed31
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/IngestScript.java
@@ -0,0 +1,52 @@
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import java.util.Map;
+
+/**
+ * A script used by the Ingest Script Processor.
+ */
+public abstract class IngestScript {
+
+    public static final String[] PARAMETERS = { "ctx" };
+
+    /** The context used to compile {@link IngestScript} factories. */
+    public static final ScriptContext CONTEXT = new ScriptContext<>("ingest", Factory.class);
+
+    /** The generic runtime parameters for the script. */
+    private final Map params;
+
+    public IngestScript(Map params) {
+        this.params = params;
+    }
+
+    /** Return the parameters for this script. */
+    public Map getParams() {
+        return params;
+    }
+
+    public abstract void execute(Map ctx);
+
+    public interface Factory {
+        IngestScript newInstance(Map params);
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
index 042953117c5..bf4bd9c57ce 100644
--- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -51,7 +51,7 @@ public class ScriptModule {
             ExecutableScript.CONTEXT,
             ExecutableScript.AGGS_CONTEXT,
             ExecutableScript.UPDATE_CONTEXT,
-            ExecutableScript.INGEST_CONTEXT,
+            IngestScript.CONTEXT,
             FilterScript.CONTEXT,
             SimilarityScript.CONTEXT,
             SimilarityWeightScript.CONTEXT,
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index b35fcbcc03c..585f8601651 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -168,7 +168,7 @@ public class ScriptServiceTests extends ESTestCase {
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
-        assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.INGEST_CONTEXT);
+        assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT);
     }
 
     public void testAllowSomeScriptTypeSettings() throws IOException {
@@ -209,13 +209,13 @@ public class ScriptServiceTests extends ESTestCase {
     }
 
     public void testCompileNonRegisteredContext() throws IOException {
-        contexts.remove(ExecutableScript.INGEST_CONTEXT.name);
+        contexts.remove(IngestScript.CONTEXT.name);
         buildScriptService(Settings.EMPTY);
 
         String type = scriptEngine.getType();
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
-            scriptService.compile(new Script(ScriptType.INLINE, type, "test", Collections.emptyMap()), ExecutableScript.INGEST_CONTEXT));
-        assertThat(e.getMessage(), containsString("script context [" + ExecutableScript.INGEST_CONTEXT.name + "] not supported"));
+            scriptService.compile(new Script(ScriptType.INLINE, type, "test", Collections.emptyMap()), IngestScript.CONTEXT));
+        assertThat(e.getMessage(), containsString("script context [" + IngestScript.CONTEXT.name + "] not supported"));
     }
 
     public void testCompileCountedInCompilationStats() throws IOException {
diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
index e608bd13d25..8e40e4bcf14 100644
--- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
+++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
@@ -88,6 +88,14 @@ public class MockScriptEngine implements ScriptEngine {
         } else if (context.instanceClazz.equals(ExecutableScript.class)) {
             ExecutableScript.Factory factory = mockCompiled::createExecutableScript;
             return context.factoryClazz.cast(factory);
+        } else if (context.instanceClazz.equals(IngestScript.class)) {
+            IngestScript.Factory factory = parameters -> new IngestScript(parameters) {
+                @Override
+                public void execute(Map ctx) {
+                    script.apply(ctx);
+                }
+            };
+            return context.factoryClazz.cast(factory);
         } else if (context.instanceClazz.equals(TemplateScript.class)) {
             TemplateScript.Factory factory = vars -> {
                 // TODO: need a better way to implement all these new contexts

From 305bfea9c3c174d35136fddbf2bcd8544b4ea3bd Mon Sep 17 00:00:00 2001
From: Tim Brooks 
Date: Fri, 13 Jul 2018 16:41:02 -0600
Subject: [PATCH 021/260] Add nio http transport to security plugin (#32018)

This is related to #27260. It adds the SecurityNioHttpServerTransport
to the security plugin. It randomly uses the nio http transport in
security integration tests.
---
 .../transport/netty4/Netty4TcpChannel.java    |   2 +-
 .../http/nio/HttpReadWriteHandler.java        |   4 +-
 .../http/nio/NioHttpChannel.java              |   2 +-
 .../http/nio/NioHttpServerChannel.java        |   3 +-
 .../http/nio/NioHttpServerTransport.java      |  27 ++-
 .../xpack/security/Security.java              |  21 +-
 .../security/rest/SecurityRestFilter.java     |  10 +-
 .../security/transport/SSLEngineUtils.java    |  93 ++++++++
 .../SecurityHttpExceptionHandler.java         |  64 ++++++
 .../transport/SecurityHttpSettings.java       |  22 ++
 .../transport/ServerTransportFilter.java      |  46 +---
 .../SecurityNetty4HttpServerTransport.java    |  47 +---
 .../security/transport/nio/NioIPFilter.java   |  32 +++
 .../transport/nio/SSLChannelContext.java      |   5 +
 .../security/transport/nio/SSLDriver.java     |   4 +
 .../nio/SecurityNioHttpServerTransport.java   | 132 +++++++++++
 .../transport/nio/SecurityNioTransport.java   |  19 +-
 .../test/SecurityIntegTestCase.java           |   1 +
 .../test/SecuritySettingsSource.java          |   1 +
 .../transport/SecurityHttpSettingsTests.java  |  44 ++++
 ...ecurityNetty4HttpServerTransportTests.java |  29 ---
 .../transport/nio/NioIPFilterTests.java       |  91 ++++++++
 .../SecurityNioHttpServerTransportTests.java  | 207 ++++++++++++++++++
 23 files changed, 750 insertions(+), 156 deletions(-)
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java

diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java
index 78a14255000..51821c73329 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java
@@ -112,7 +112,7 @@ public class Netty4TcpChannel implements TcpChannel {
         }
     }
 
-    public Channel getLowLevelChannel() {
+    public Channel getNettyChannel() {
         return channel;
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
index ad81719ebcb..3dcd59cf8e2 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
@@ -51,8 +51,8 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
     private final NioHttpChannel nioHttpChannel;
     private final NioHttpServerTransport transport;
 
-    HttpReadWriteHandler(NioHttpChannel nioHttpChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
-                         NioCorsConfig corsConfig) {
+    public HttpReadWriteHandler(NioHttpChannel nioHttpChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
+                                NioCorsConfig corsConfig) {
         this.nioHttpChannel = nioHttpChannel;
         this.transport = transport;
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java
index 0a797a5687e..1a4c5f14c91 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java
@@ -28,7 +28,7 @@ import java.nio.channels.SocketChannel;
 
 public class NioHttpChannel extends NioSocketChannel implements HttpChannel {
 
-    NioHttpChannel(SocketChannel socketChannel) {
+    public NioHttpChannel(SocketChannel socketChannel) {
         super(socketChannel);
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java
index 2674d38dc49..d72376da5c0 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java
@@ -23,12 +23,11 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.http.HttpServerChannel;
 import org.elasticsearch.nio.NioServerSocketChannel;
 
-import java.io.IOException;
 import java.nio.channels.ServerSocketChannel;
 
 public class NioHttpServerChannel extends NioServerSocketChannel implements HttpServerChannel {
 
-    NioHttpServerChannel(ServerSocketChannel serverSocketChannel) throws IOException {
+    public NioHttpServerChannel(ServerSocketChannel serverSocketChannel) {
         super(serverSocketChannel);
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java
index b80778e9642..9c672c1caf1 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java
@@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.http.AbstractHttpServerTransport;
 import org.elasticsearch.http.HttpChannel;
 import org.elasticsearch.http.HttpServerChannel;
-import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.http.nio.cors.NioCorsConfig;
 import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder;
 import org.elasticsearch.nio.BytesChannelContext;
@@ -87,21 +86,21 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
             (s) -> Integer.toString(EsExecutors.numberOfProcessors(s) * 2),
             (s) -> Setting.parseInt(s, 1, "http.nio.worker_count"), Setting.Property.NodeScope);
 
-    private final PageCacheRecycler pageCacheRecycler;
+    protected final PageCacheRecycler pageCacheRecycler;
+    protected final NioCorsConfig corsConfig;
 
-    private final boolean tcpNoDelay;
-    private final boolean tcpKeepAlive;
-    private final boolean reuseAddress;
-    private final int tcpSendBufferSize;
-    private final int tcpReceiveBufferSize;
+    protected final boolean tcpNoDelay;
+    protected final boolean tcpKeepAlive;
+    protected final boolean reuseAddress;
+    protected final int tcpSendBufferSize;
+    protected final int tcpReceiveBufferSize;
 
     private NioGroup nioGroup;
-    private HttpChannelFactory channelFactory;
-    private final NioCorsConfig corsConfig;
+    private ChannelFactory channelFactory;
 
     public NioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays,
                                   PageCacheRecycler pageCacheRecycler, ThreadPool threadPool, NamedXContentRegistry xContentRegistry,
-                                  HttpServerTransport.Dispatcher dispatcher) {
+                                  Dispatcher dispatcher) {
         super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher);
         this.pageCacheRecycler = pageCacheRecycler;
 
@@ -136,7 +135,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
             nioGroup = new NioGroup(daemonThreadFactory(this.settings, HTTP_SERVER_ACCEPTOR_THREAD_NAME_PREFIX), acceptorCount,
                 daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount,
                 (s) -> new EventHandler(this::onNonChannelException, s));
-            channelFactory = new HttpChannelFactory();
+            channelFactory = channelFactory();
             bindServer();
             success = true;
         } catch (IOException e) {
@@ -162,6 +161,10 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
         return nioGroup.bindServerChannel(socketAddress, channelFactory);
     }
 
+    protected ChannelFactory channelFactory() {
+        return new HttpChannelFactory();
+    }
+
     static NioCorsConfig buildCorsConfig(Settings settings) {
         if (SETTING_CORS_ENABLED.get(settings) == false) {
             return NioCorsConfigBuilder.forOrigins().disable().build();
@@ -194,7 +197,7 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
             .build();
     }
 
-    private void acceptChannel(NioSocketChannel socketChannel) {
+    protected void acceptChannel(NioSocketChannel socketChannel) {
         super.serverAcceptedChannel((HttpChannel) socketChannel);
     }
 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 26ec50c0eb3..3115c08a946 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -200,11 +200,13 @@ import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction
 import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction;
 import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction;
 import org.elasticsearch.xpack.security.support.SecurityIndexManager;
+import org.elasticsearch.xpack.security.transport.SecurityHttpSettings;
 import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor;
 import org.elasticsearch.xpack.security.transport.filter.IPFilter;
 import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServerTransport;
 import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport;
 import org.elasticsearch.xpack.core.template.TemplateUtils;
+import org.elasticsearch.xpack.security.transport.nio.SecurityNioHttpServerTransport;
 import org.elasticsearch.xpack.security.transport.nio.SecurityNioTransport;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -511,21 +513,22 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
 
             if (NetworkModule.HTTP_TYPE_SETTING.exists(settings)) {
                 final String httpType = NetworkModule.HTTP_TYPE_SETTING.get(settings);
-                if (httpType.equals(SecurityField.NAME4)) {
-                    SecurityNetty4HttpServerTransport.overrideSettings(builder, settings);
+                if (httpType.equals(SecurityField.NAME4) || httpType.equals(SecurityField.NIO)) {
+                    SecurityHttpSettings.overrideSettings(builder, settings);
                 } else {
                     final String message = String.format(
                             Locale.ROOT,
-                            "http type setting [%s] must be [%s] but is [%s]",
+                            "http type setting [%s] must be [%s] or [%s] but is [%s]",
                             NetworkModule.HTTP_TYPE_KEY,
                             SecurityField.NAME4,
+                            SecurityField.NIO,
                             httpType);
                     throw new IllegalArgumentException(message);
                 }
             } else {
                 // default to security4
                 builder.put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4);
-                SecurityNetty4HttpServerTransport.overrideSettings(builder, settings);
+                SecurityHttpSettings.overrideSettings(builder, settings);
             }
             builder.put(SecuritySettings.addUserSettings(settings));
             return builder.build();
@@ -869,8 +872,14 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
         if (enabled == false) { // don't register anything if we are not enabled
             return Collections.emptyMap();
         }
-        return Collections.singletonMap(SecurityField.NAME4, () -> new SecurityNetty4HttpServerTransport(settings,
-                networkService, bigArrays, ipFilter.get(), getSslService(), threadPool, xContentRegistry, dispatcher));
+
+        Map> httpTransports = new HashMap<>();
+        httpTransports.put(SecurityField.NAME4, () -> new SecurityNetty4HttpServerTransport(settings, networkService, bigArrays,
+            ipFilter.get(), getSslService(), threadPool, xContentRegistry, dispatcher));
+        httpTransports.put(SecurityField.NIO, () -> new SecurityNioHttpServerTransport(settings, networkService, bigArrays,
+            pageCacheRecycler, threadPool, xContentRegistry, dispatcher, ipFilter.get(), getSslService()));
+
+        return httpTransports;
     }
 
     @Override
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
index 9109bb37e8c..8d304302e03 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
@@ -5,8 +5,6 @@
  */
 package org.elasticsearch.xpack.security.rest;
 
-import io.netty.channel.Channel;
-import io.netty.handler.ssl.SslHandler;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
@@ -15,7 +13,6 @@ import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.http.HttpChannel;
-import org.elasticsearch.http.netty4.Netty4HttpChannel;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestChannel;
@@ -24,7 +21,7 @@ import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.RestRequest.Method;
 import org.elasticsearch.xpack.core.security.rest.RestRequestFilter;
 import org.elasticsearch.xpack.security.authc.AuthenticationService;
-import org.elasticsearch.xpack.security.transport.ServerTransportFilter;
+import org.elasticsearch.xpack.security.transport.SSLEngineUtils;
 
 import java.io.IOException;
 
@@ -53,10 +50,7 @@ public class SecurityRestFilter implements RestHandler {
             // CORS - allow for preflight unauthenticated OPTIONS request
             if (extractClientCertificate) {
                 HttpChannel httpChannel = request.getHttpChannel();
-                Channel nettyChannel = ((Netty4HttpChannel) httpChannel).getNettyChannel();
-                SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
-                assert handler != null;
-                ServerTransportFilter.extractClientCertificates(logger, threadContext, handler.engine(), nettyChannel);
+                SSLEngineUtils.extractClientCertificates(logger, threadContext, httpChannel);
             }
             service.authenticate(maybeWrapRestRequest(request), ActionListener.wrap(
                 authentication -> {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java
new file mode 100644
index 00000000000..5bbcbaa0509
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import io.netty.channel.Channel;
+import io.netty.handler.ssl.SslHandler;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.http.HttpChannel;
+import org.elasticsearch.http.netty4.Netty4HttpChannel;
+import org.elasticsearch.http.nio.NioHttpChannel;
+import org.elasticsearch.nio.SocketChannelContext;
+import org.elasticsearch.transport.TcpChannel;
+import org.elasticsearch.transport.netty4.Netty4TcpChannel;
+import org.elasticsearch.transport.nio.NioTcpChannel;
+import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
+import org.elasticsearch.xpack.security.transport.nio.SSLChannelContext;
+
+import javax.net.ssl.SSLEngine;
+import javax.net.ssl.SSLPeerUnverifiedException;
+import java.security.cert.Certificate;
+import java.security.cert.X509Certificate;
+
+public class SSLEngineUtils {
+
+    private SSLEngineUtils() {}
+
+    public static void extractClientCertificates(Logger logger, ThreadContext threadContext, HttpChannel httpChannel) {
+        SSLEngine sslEngine = getSSLEngine(httpChannel);
+        extract(logger, threadContext, sslEngine, httpChannel);
+    }
+
+    public static void extractClientCertificates(Logger logger, ThreadContext threadContext, TcpChannel tcpChannel) {
+        SSLEngine sslEngine = getSSLEngine(tcpChannel);
+        extract(logger, threadContext, sslEngine, tcpChannel);
+    }
+
+    public static SSLEngine getSSLEngine(HttpChannel httpChannel) {
+        if (httpChannel instanceof Netty4HttpChannel) {
+            Channel nettyChannel = ((Netty4HttpChannel) httpChannel).getNettyChannel();
+            SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
+            assert handler != null : "Must have SslHandler";
+            return handler.engine();
+        } else if (httpChannel instanceof NioHttpChannel) {
+            SocketChannelContext context = ((NioHttpChannel) httpChannel).getContext();
+            assert context instanceof SSLChannelContext : "Must be SSLChannelContext.class, found:  " + context.getClass();
+            return ((SSLChannelContext) context).getSSLEngine();
+        } else {
+            throw new AssertionError("Unknown channel class type: " + httpChannel.getClass());
+        }
+    }
+
+    public static SSLEngine getSSLEngine(TcpChannel tcpChannel) {
+        if (tcpChannel instanceof Netty4TcpChannel) {
+            Channel nettyChannel = ((Netty4TcpChannel) tcpChannel).getNettyChannel();
+            SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
+            assert handler != null : "Must have SslHandler";
+            return handler.engine();
+        } else if (tcpChannel instanceof NioTcpChannel) {
+            SocketChannelContext context = ((NioTcpChannel) tcpChannel).getContext();
+            assert context instanceof SSLChannelContext : "Must be SSLChannelContext.class, found:  " + context.getClass();
+            return ((SSLChannelContext) context).getSSLEngine();
+        } else {
+            throw new AssertionError("Unknown channel class type: " + tcpChannel.getClass());
+        }
+    }
+
+    private static void extract(Logger logger, ThreadContext threadContext, SSLEngine sslEngine, Object channel) {
+        try {
+            Certificate[] certs = sslEngine.getSession().getPeerCertificates();
+            if (certs instanceof X509Certificate[]) {
+                threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, certs);
+            }
+        } catch (SSLPeerUnverifiedException e) {
+            // this happens when client authentication is optional and the client does not provide credentials. If client
+            // authentication was required then this connection should be closed before ever getting into this class
+            assert sslEngine.getNeedClientAuth() == false;
+            assert sslEngine.getWantClientAuth();
+            if (logger.isTraceEnabled()) {
+                logger.trace(
+                    (Supplier<?>) () -> new ParameterizedMessage(
+                        "SSL Peer did not present a certificate on channel [{}]", channel), e);
+            } else if (logger.isDebugEnabled()) {
+                logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
+            }
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
new file mode 100644
index 00000000000..c1999c5ddfb
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.elasticsearch.common.component.Lifecycle;
+import org.elasticsearch.common.network.CloseableChannel;
+import org.elasticsearch.http.HttpChannel;
+
+import java.util.function.BiConsumer;
+
+import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException;
+import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isNotSslRecordException;
+import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException;
+
+public final class SecurityHttpExceptionHandler implements BiConsumer<HttpChannel, Exception> {
+
+    private final Lifecycle lifecycle;
+    private final Logger logger;
+    private final BiConsumer<HttpChannel, Exception> fallback;
+
+    public SecurityHttpExceptionHandler(Logger logger, Lifecycle lifecycle, BiConsumer<HttpChannel, Exception> fallback) {
+        this.lifecycle = lifecycle;
+        this.logger = logger;
+        this.fallback = fallback;
+    }
+
+    public void accept(HttpChannel channel, Exception e) {
+        if (!lifecycle.started()) {
+            return;
+        }
+
+        if (isNotSslRecordException(e)) {
+            if (logger.isTraceEnabled()) {
+                logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}",
+                    channel), e);
+            } else {
+                logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel);
+            }
+            CloseableChannel.closeChannel(channel);
+        } else if (isCloseDuringHandshakeException(e)) {
+            if (logger.isTraceEnabled()) {
+                logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e);
+            } else {
+                logger.warn("connection {} closed during ssl handshake", channel);
+            }
+            CloseableChannel.closeChannel(channel);
+        } else if (isReceivedCertificateUnknownException(e)) {
+            if (logger.isTraceEnabled()) {
+                logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}",
+                    channel), e);
+            } else {
+                logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
+            }
+            CloseableChannel.closeChannel(channel);
+        } else {
+            fallback.accept(channel, e);
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java
new file mode 100644
index 00000000000..f8079535acf
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import org.elasticsearch.common.settings.Settings;
+
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
+import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
+
+public final class SecurityHttpSettings {
+
+    private SecurityHttpSettings() {}
+
+    public static void overrideSettings(Settings.Builder settingsBuilder, Settings settings) {
+        if (HTTP_SSL_ENABLED.get(settings) && SETTING_HTTP_COMPRESSION.exists(settings) == false) {
+            settingsBuilder.put(SETTING_HTTP_COMPRESSION.getKey(), false);
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java
index 9427812ba13..2f0c40c1fdd 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java
@@ -5,11 +5,7 @@
  */
 package org.elasticsearch.xpack.security.transport;
 
-import io.netty.channel.Channel;
-import io.netty.handler.ssl.SslHandler;
 import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.IndicesRequest;
@@ -20,11 +16,13 @@ import org.elasticsearch.action.support.DestructiveOperations;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.transport.TaskTransportChannel;
+import org.elasticsearch.transport.TcpChannel;
 import org.elasticsearch.transport.TcpTransportChannel;
 import org.elasticsearch.transport.TransportChannel;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.netty4.Netty4TcpChannel;
+import org.elasticsearch.transport.nio.NioTcpChannel;
 import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
 import org.elasticsearch.xpack.core.security.user.KibanaUser;
@@ -32,16 +30,10 @@ import org.elasticsearch.xpack.core.security.user.SystemUser;
 import org.elasticsearch.xpack.core.security.user.User;
 import org.elasticsearch.xpack.security.action.SecurityActionMapper;
 import org.elasticsearch.xpack.security.authc.AuthenticationService;
-import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
 import org.elasticsearch.xpack.security.authz.AuthorizationService;
 import org.elasticsearch.xpack.security.authz.AuthorizationUtils;
 
-import javax.net.ssl.SSLEngine;
-import javax.net.ssl.SSLPeerUnverifiedException;
-
 import java.io.IOException;
-import java.security.cert.Certificate;
-import java.security.cert.X509Certificate;
 
 import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError;
 
@@ -115,13 +107,12 @@ public interface ServerTransportFilter {
                 unwrappedChannel = ((TaskTransportChannel) unwrappedChannel).getChannel();
             }
 
-            if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel) &&
-                ((TcpTransportChannel) unwrappedChannel).getChannel() instanceof Netty4TcpChannel) {
-                Channel channel = ((Netty4TcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel();
-                SslHandler sslHandler = channel.pipeline().get(SslHandler.class);
-                if (channel.isOpen()) {
-                    assert sslHandler != null : "channel [" + channel + "] did not have a ssl handler. pipeline " + channel.pipeline();
-                    extractClientCertificates(logger, threadContext, sslHandler.engine(), channel);
+            if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel)) {
+                TcpChannel tcpChannel = ((TcpTransportChannel) unwrappedChannel).getChannel();
+                if (tcpChannel instanceof Netty4TcpChannel || tcpChannel instanceof NioTcpChannel) {
+                    if (tcpChannel.isOpen()) {
+                        SSLEngineUtils.extractClientCertificates(logger, threadContext, tcpChannel);
+                    }
                 }
             }
 
@@ -172,27 +163,6 @@ public interface ServerTransportFilter {
         }
     }
 
-    static void extractClientCertificates(Logger logger, ThreadContext threadContext, SSLEngine sslEngine, Channel channel) {
-        try {
-            Certificate[] certs = sslEngine.getSession().getPeerCertificates();
-            if (certs instanceof X509Certificate[]) {
-                threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, certs);
-            }
-        } catch (SSLPeerUnverifiedException e) {
-            // this happens when client authentication is optional and the client does not provide credentials. If client
-            // authentication was required then this connection should be closed before ever getting into this class
-            assert sslEngine.getNeedClientAuth() == false;
-            assert sslEngine.getWantClientAuth();
-            if (logger.isTraceEnabled()) {
-                logger.trace(
-                        (Supplier) () -> new ParameterizedMessage(
-                                "SSL Peer did not present a certificate on channel [{}]", channel), e);
-            } else if (logger.isDebugEnabled()) {
-                logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
-            }
-        }
-    }
-
     /**
      * A server transport filter rejects internal calls, which should be used on connections
      * where only clients connect to. This ensures that no client can send any internal actions
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java
index d7a609f6f14..a728467f8bd 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java
@@ -8,8 +8,6 @@ package org.elasticsearch.xpack.security.transport.netty4;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelHandler;
 import io.netty.handler.ssl.SslHandler;
-import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.common.network.CloseableChannel;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
@@ -19,18 +17,16 @@ import org.elasticsearch.http.netty4.Netty4HttpServerTransport;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
 import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;
 import org.elasticsearch.xpack.security.transport.filter.IPFilter;
 
 import javax.net.ssl.SSLEngine;
 
-import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
 import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
-import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException;
-import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isNotSslRecordException;
-import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException;
 
 public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport {
 
+    private final SecurityHttpExceptionHandler securityExceptionHandler;
     private final IPFilter ipFilter;
     private final SSLService sslService;
     private final SSLConfiguration sslConfiguration;
@@ -39,6 +35,7 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
                                              SSLService sslService, ThreadPool threadPool, NamedXContentRegistry xContentRegistry,
                                              Dispatcher dispatcher) {
         super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher);
+        this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e));
         this.ipFilter = ipFilter;
         final boolean ssl = HTTP_SSL_ENABLED.get(settings);
         this.sslService = sslService;
@@ -51,41 +48,11 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
         } else {
             this.sslConfiguration = null;
         }
-
     }
 
     @Override
     protected void onException(HttpChannel channel, Exception e) {
-        if (!lifecycle.started()) {
-            return;
-        }
-
-        if (isNotSslRecordException(e)) {
-            if (logger.isTraceEnabled()) {
-                logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}",
-                    channel), e);
-            } else {
-                logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel);
-            }
-            CloseableChannel.closeChannel(channel);
-        } else if (isCloseDuringHandshakeException(e)) {
-            if (logger.isTraceEnabled()) {
-                logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e);
-            } else {
-                logger.warn("connection {} closed during ssl handshake", channel);
-            }
-            CloseableChannel.closeChannel(channel);
-        } else if (isReceivedCertificateUnknownException(e)) {
-            if (logger.isTraceEnabled()) {
-                logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}",
-                    channel), e);
-            } else {
-                logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
-            }
-            CloseableChannel.closeChannel(channel);
-        } else {
-            super.onException(channel, e);
-        }
+        securityExceptionHandler.accept(channel, e);
     }
 
     @Override
@@ -115,10 +82,4 @@ public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport
             ch.pipeline().addFirst("ip_filter", new IpFilterRemoteAddressFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME));
         }
     }
-
-    public static void overrideSettings(Settings.Builder settingsBuilder, Settings settings) {
-        if (HTTP_SSL_ENABLED.get(settings) && SETTING_HTTP_COMPRESSION.exists(settings) == false) {
-            settingsBuilder.put(SETTING_HTTP_COMPRESSION.getKey(), false);
-        }
-    }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java
new file mode 100644
index 00000000000..afb13ceff2e
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+
+import java.util.function.Predicate;
+
+public final class NioIPFilter implements Predicate<NioSocketChannel> {
+
+    private final IPFilter filter;
+    private final String profile;
+
+    NioIPFilter(@Nullable IPFilter filter, String profile) {
+        this.filter = filter;
+        this.profile = profile;
+    }
+
+    @Override
+    public boolean test(NioSocketChannel nioChannel) {
+        if (filter != null) {
+            return filter.accept(profile, nioChannel.getRemoteAddress());
+        } else {
+            return true;
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java
index da348ea1f78..c83bd16ca95 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java
@@ -14,6 +14,7 @@ import org.elasticsearch.nio.SocketChannelContext;
 import org.elasticsearch.nio.NioSelector;
 import org.elasticsearch.nio.WriteOperation;
 
+import javax.net.ssl.SSLEngine;
 import java.io.IOException;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
@@ -164,6 +165,10 @@ public final class SSLChannelContext extends SocketChannelContext {
         }
     }
 
+    public SSLEngine getSSLEngine() {
+        return sslDriver.getSSLEngine();
+    }
+
     private static class CloseNotifyOperation implements WriteOperation {
 
         private static final BiConsumer<Void, Throwable> LISTENER = (v, t) -> {};
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
index 4080574713c..382230684c7 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
@@ -96,6 +96,10 @@ public class SSLDriver implements AutoCloseable {
         }
     }
 
+    public SSLEngine getSSLEngine() {
+        return engine;
+    }
+
     public boolean hasFlushPending() {
         return networkWriteBuffer.hasRemaining();
     }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
new file mode 100644
index 00000000000..006c78b4ae0
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.recycler.Recycler;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.http.nio.HttpReadWriteHandler;
+import org.elasticsearch.http.nio.NioHttpChannel;
+import org.elasticsearch.http.nio.NioHttpServerChannel;
+import org.elasticsearch.http.nio.NioHttpServerTransport;
+import org.elasticsearch.nio.BytesChannelContext;
+import org.elasticsearch.nio.ChannelFactory;
+import org.elasticsearch.nio.InboundChannelBuffer;
+import org.elasticsearch.nio.NioSelector;
+import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.nio.ServerChannelContext;
+import org.elasticsearch.nio.SocketChannelContext;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
+import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+
+import javax.net.ssl.SSLEngine;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.nio.channels.ServerSocketChannel;
+import java.nio.channels.SocketChannel;
+import java.util.function.Consumer;
+import java.util.function.Supplier;
+
+import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
+
+public class SecurityNioHttpServerTransport extends NioHttpServerTransport {
+
+    private final SecurityHttpExceptionHandler securityExceptionHandler;
+    private final IPFilter ipFilter;
+    private final NioIPFilter nioIpFilter;
+    private final SSLService sslService;
+    private final SSLConfiguration sslConfiguration;
+    private final boolean sslEnabled;
+
+    public SecurityNioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays,
+                                          PageCacheRecycler pageCacheRecycler, ThreadPool threadPool,
+                                          NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, IPFilter ipFilter,
+                                          SSLService sslService) {
+        super(settings, networkService, bigArrays, pageCacheRecycler, threadPool, xContentRegistry, dispatcher);
+        this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e));
+        this.ipFilter = ipFilter;
+        this.nioIpFilter = new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME);
+        this.sslEnabled = HTTP_SSL_ENABLED.get(settings);
+        this.sslService = sslService;
+        if (sslEnabled) {
+            this.sslConfiguration = sslService.sslConfiguration(SSLService.getHttpTransportSSLSettings(settings), Settings.EMPTY);
+            if (sslService.isConfigurationValidForServerUsage(sslConfiguration) == false) {
+                throw new IllegalArgumentException("a key must be provided to run as a server. the key should be configured using the " +
+                    "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting");
+            }
+        } else {
+            this.sslConfiguration = null;
+        }
+    }
+
+    @Override
+    protected void doStart() {
+        super.doStart();
+        ipFilter.setBoundHttpTransportAddress(this.boundAddress());
+    }
+
+    protected SecurityHttpChannelFactory channelFactory() {
+        return new SecurityHttpChannelFactory();
+    }
+
+    class SecurityHttpChannelFactory extends ChannelFactory<NioHttpServerChannel, NioHttpChannel> {
+
+        private SecurityHttpChannelFactory() {
+            super(new RawChannelFactory(tcpNoDelay, tcpKeepAlive, reuseAddress, tcpSendBufferSize, tcpReceiveBufferSize));
+        }
+
+        @Override
+        public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
+            NioHttpChannel httpChannel = new NioHttpChannel(channel);
+            Supplier<InboundChannelBuffer.Page> pageSupplier = () -> {
+                Recycler.V<byte[]> bytes = pageCacheRecycler.bytePage(false);
+                return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close);
+            };
+            HttpReadWriteHandler httpHandler = new HttpReadWriteHandler(httpChannel, SecurityNioHttpServerTransport.this,
+                handlingSettings, corsConfig);
+            InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
+            Consumer<Exception> exceptionHandler = (e) -> securityExceptionHandler.accept(httpChannel, e);
+
+            SocketChannelContext context;
+            if (sslEnabled) {
+                SSLEngine sslEngine;
+                boolean hostnameVerificationEnabled = sslConfiguration.verificationMode().isHostnameVerificationEnabled();
+                if (hostnameVerificationEnabled) {
+                    InetSocketAddress address = (InetSocketAddress) channel.getRemoteAddress();
+                    // we create the socket based on the name given. don't reverse DNS
+                    sslEngine = sslService.createSSLEngine(sslConfiguration, address.getHostString(), address.getPort());
+                } else {
+                    sslEngine = sslService.createSSLEngine(sslConfiguration, null, -1);
+                }
+                SSLDriver sslDriver = new SSLDriver(sslEngine, false);
+                context = new SSLChannelContext(httpChannel, selector, exceptionHandler, sslDriver, httpHandler, buffer, nioIpFilter);
+            } else {
+                context = new BytesChannelContext(httpChannel, selector, exceptionHandler, httpHandler, buffer, nioIpFilter);
+            }
+            httpChannel.setContext(context);
+
+            return httpChannel;
+        }
+
+        @Override
+        public NioHttpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) {
+            NioHttpServerChannel httpServerChannel = new NioHttpServerChannel(channel);
+            Consumer<Exception> exceptionHandler = (e) -> onServerException(httpServerChannel, e);
+            Consumer<NioSocketChannel> acceptor = SecurityNioHttpServerTransport.this::acceptChannel;
+            ServerChannelContext context = new ServerChannelContext(httpServerChannel, this, selector, acceptor, exceptionHandler);
+            httpServerChannel.setContext(context);
+
+            return httpServerChannel;
+        }
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java
index fb94b669e83..71e14696a11 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java
@@ -44,7 +44,6 @@ import java.nio.channels.SocketChannel;
 import java.util.Collections;
 import java.util.Map;
 import java.util.function.Consumer;
-import java.util.function.Predicate;
 import java.util.function.Supplier;
 
 import static org.elasticsearch.xpack.core.security.SecurityField.setting;
@@ -129,19 +128,11 @@ public class SecurityNioTransport extends NioTransport {
         return new SecurityTcpChannelFactory(profileSettings, isClient);
     }
 
-    private boolean validateChannel(NioSocketChannel channel) {
-        if (authenticator != null) {
-            NioTcpChannel nioTcpChannel = (NioTcpChannel) channel;
-            return authenticator.accept(nioTcpChannel.getProfile(), nioTcpChannel.getRemoteAddress());
-        } else {
-            return true;
-        }
-    }
-
     private class SecurityTcpChannelFactory extends TcpChannelFactory {
 
         private final String profileName;
         private final boolean isClient;
+        private final NioIPFilter ipFilter;
 
         private SecurityTcpChannelFactory(ProfileSettings profileSettings, boolean isClient) {
             super(new RawChannelFactory(profileSettings.tcpNoDelay,
@@ -151,12 +142,12 @@ public class SecurityNioTransport extends NioTransport {
                 Math.toIntExact(profileSettings.receiveBufferSize.getBytes())));
             this.profileName = profileSettings.profileName;
             this.isClient = isClient;
+            this.ipFilter = new NioIPFilter(authenticator, profileName);
         }
 
         @Override
         public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
             NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel);
-            SocketChannelContext context;
             Supplier pageSupplier = () -> {
                 Recycler.V bytes = pageCacheRecycler.bytePage(false);
                 return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close);
@@ -164,8 +155,8 @@ public class SecurityNioTransport extends NioTransport {
             TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, SecurityNioTransport.this);
             InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
             Consumer exceptionHandler = (e) -> onException(nioChannel, e);
-            Predicate filter = SecurityNioTransport.this::validateChannel;
 
+            SocketChannelContext context;
             if (sslEnabled) {
                 SSLEngine sslEngine;
                 SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE);
@@ -179,9 +170,9 @@ public class SecurityNioTransport extends NioTransport {
                     sslEngine = sslService.createSSLEngine(sslConfig, null, -1);
                 }
                 SSLDriver sslDriver = new SSLDriver(sslEngine, isClient);
-                context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, filter);
+                context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, ipFilter);
             } else {
-                context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, filter);
+                context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, ipFilter);
             }
             nioChannel.setContext(context);
 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
index e6db3407496..9bb0e44eb66 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
@@ -244,6 +244,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase {
         builder.put(customSettings, false); // handle secure settings separately
         builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
         builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO);
+        builder.put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO);
         Settings.Builder customBuilder = Settings.builder().put(customSettings);
         if (customBuilder.getSecureSettings() != null) {
             SecuritySettingsSource.addSecureSettings(builder, secureSettings ->
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
index 2e0662264a2..df1456c3790 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
@@ -126,6 +126,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas
         Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal))
                 .put(XPackSettings.SECURITY_ENABLED.getKey(), true)
                 .put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
+                .put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
                 //TODO: for now isolate security tests from watcher & monitoring (randomize this later)
                 .put(XPackSettings.WATCHER_ENABLED.getKey(), false)
                 .put(XPackSettings.MONITORING_ENABLED.getKey(), false)
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java
new file mode 100644
index 00000000000..56c79a4c127
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.http.HttpTransportSettings;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.XPackSettings;
+
+import static org.hamcrest.Matchers.is;
+
+public class SecurityHttpSettingsTests extends ESTestCase {
+
+    public void testDisablesCompressionByDefaultForSsl() {
+        Settings settings = Settings.builder()
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
+
+        Settings.Builder pluginSettingsBuilder = Settings.builder();
+        SecurityHttpSettings.overrideSettings(pluginSettingsBuilder, settings);
+        assertThat(HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(pluginSettingsBuilder.build()), is(false));
+    }
+
+    public void testLeavesCompressionOnIfNotSsl() {
+        Settings settings = Settings.builder()
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), false).build();
+        Settings.Builder pluginSettingsBuilder = Settings.builder();
+        SecurityHttpSettings.overrideSettings(pluginSettingsBuilder, settings);
+        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
+    }
+
+    public void testDoesNotChangeExplicitlySetCompression() {
+        Settings settings = Settings.builder()
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
+            .build();
+
+        Settings.Builder pluginSettingsBuilder = Settings.builder();
+        SecurityHttpSettings.overrideSettings(pluginSettingsBuilder, settings);
+        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java
index ec925f43abe..ad64dea79a5 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java
@@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
-import org.elasticsearch.http.HttpTransportSettings;
 import org.elasticsearch.http.NullDispatcher;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -144,34 +143,6 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase {
         assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
     }
 
-    public void testDisablesCompressionByDefaultForSsl() throws Exception {
-        Settings settings = Settings.builder()
-                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
-
-        Settings.Builder pluginSettingsBuilder = Settings.builder();
-        SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
-        assertThat(HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(pluginSettingsBuilder.build()), is(false));
-    }
-
-    public void testLeavesCompressionOnIfNotSsl() throws Exception {
-        Settings settings = Settings.builder()
-                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), false).build();
-        Settings.Builder pluginSettingsBuilder = Settings.builder();
-        SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
-        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
-    }
-
-    public void testDoesNotChangeExplicitlySetCompression() throws Exception {
-        Settings settings = Settings.builder()
-                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
-                .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
-                .build();
-
-        Settings.Builder pluginSettingsBuilder = Settings.builder();
-        SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
-        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
-    }
-
     public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Exception {
         MockSecureSettings secureSettings = new MockSecureSettings();
         secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java
new file mode 100644
index 00000000000..1832669fce1
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.component.Lifecycle;
+import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.BoundTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.http.HttpServerTransport;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.transport.Transport;
+import org.elasticsearch.xpack.security.audit.AuditTrailService;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+import org.junit.Before;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class NioIPFilterTests extends ESTestCase {
+
+    private NioIPFilter nioIPFilter;
+
+    @Before
+    public void init() throws Exception {
+        Settings settings = Settings.builder()
+            .put("xpack.security.transport.filter.allow", "127.0.0.1")
+            .put("xpack.security.transport.filter.deny", "10.0.0.0/8")
+            .build();
+
+        boolean isHttpEnabled = randomBoolean();
+
+        Transport transport = mock(Transport.class);
+        TransportAddress address = new TransportAddress(InetAddress.getLoopbackAddress(), 9300);
+        when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { address }, address));
+        when(transport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(
+            IPFilter.HTTP_FILTER_ALLOW_SETTING,
+            IPFilter.HTTP_FILTER_DENY_SETTING,
+            IPFilter.IP_FILTER_ENABLED_HTTP_SETTING,
+            IPFilter.IP_FILTER_ENABLED_SETTING,
+            IPFilter.TRANSPORT_FILTER_ALLOW_SETTING,
+            IPFilter.TRANSPORT_FILTER_DENY_SETTING,
+            IPFilter.PROFILE_FILTER_ALLOW_SETTING,
+            IPFilter.PROFILE_FILTER_DENY_SETTING)));
+        XPackLicenseState licenseState = mock(XPackLicenseState.class);
+        when(licenseState.isIpFilteringAllowed()).thenReturn(true);
+        when(licenseState.isSecurityEnabled()).thenReturn(true);
+        AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState);
+        IPFilter ipFilter = new IPFilter(settings, auditTrailService, clusterSettings, licenseState);
+        ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses());
+        if (isHttpEnabled) {
+            HttpServerTransport httpTransport = mock(HttpServerTransport.class);
+            TransportAddress httpAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 9200);
+            when(httpTransport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { httpAddress }, httpAddress));
+            when(httpTransport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
+            ipFilter.setBoundHttpTransportAddress(httpTransport.boundAddress());
+        }
+
+        if (isHttpEnabled) {
+            nioIPFilter = new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME);
+        } else {
+            nioIPFilter = new NioIPFilter(ipFilter, "default");
+        }
+    }
+
+    public void testThatFilteringWorksByIp() throws Exception {
+        InetSocketAddress localhostAddr = new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 12345);
+        NioSocketChannel channel1 = mock(NioSocketChannel.class);
+        when(channel1.getRemoteAddress()).thenReturn(localhostAddr);
+        assertThat(nioIPFilter.test(channel1), is(true));
+
+        InetSocketAddress remoteAddr = new InetSocketAddress(InetAddresses.forString("10.0.0.8"), 12345);
+        NioSocketChannel channel2 = mock(NioSocketChannel.class);
+        when(channel2.getRemoteAddress()).thenReturn(remoteAddr);
+        assertThat(nioIPFilter.test(channel2), is(false));
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java
new file mode 100644
index 00000000000..b5d84d45916
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.settings.MockSecureSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.http.NullDispatcher;
+import org.elasticsearch.http.nio.NioHttpChannel;
+import org.elasticsearch.nio.NioSelector;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.core.ssl.SSLClientAuth;
+import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.SSLEngineUtils;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+import org.junit.Before;
+
+import javax.net.ssl.SSLEngine;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.nio.channels.SocketChannel;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Locale;
+
+import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class SecurityNioHttpServerTransportTests extends ESTestCase {
+
+    private SSLService sslService;
+    private Environment env;
+    private InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0);
+
+    @Before
+    public void createSSLService() {
+        Path testNodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks");
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode");
+        Settings settings = Settings.builder()
+            .put("xpack.ssl.keystore.path", testNodeStore)
+            .put("path.home", createTempDir())
+            .setSecureSettings(secureSettings)
+            .build();
+        env = TestEnvironment.newEnvironment(settings);
+        sslService = new SSLService(settings, env);
+    }
+
+    public void testDefaultClientAuth() throws IOException {
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+
+        assertThat(engine.getNeedClientAuth(), is(false));
+        assertThat(engine.getWantClientAuth(), is(false));
+    }
+
+    public void testOptionalClientAuth() throws IOException {
+        String value = randomFrom(SSLClientAuth.OPTIONAL.name(), SSLClientAuth.OPTIONAL.name().toLowerCase(Locale.ROOT));
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.client_authentication", value).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(engine.getNeedClientAuth(), is(false));
+        assertThat(engine.getWantClientAuth(), is(true));
+    }
+
+    public void testRequiredClientAuth() throws IOException {
+        String value = randomFrom(SSLClientAuth.REQUIRED.name(), SSLClientAuth.REQUIRED.name().toLowerCase(Locale.ROOT));
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.client_authentication", value).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(engine.getNeedClientAuth(), is(true));
+        assertThat(engine.getWantClientAuth(), is(false));
+    }
+
+    public void testNoClientAuth() throws IOException {
+        String value = randomFrom(SSLClientAuth.NONE.name(), SSLClientAuth.NONE.name().toLowerCase(Locale.ROOT));
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.client_authentication", value).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(engine.getNeedClientAuth(), is(false));
+        assertThat(engine.getWantClientAuth(), is(false));
+    }
+
+    public void testCustomSSLConfiguration() throws IOException {
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine defaultEngine = SSLEngineUtils.getSSLEngine(channel);
+
+        settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.supported_protocols", "TLSv1.2")
+            .build();
+        sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings));
+        transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+        factory = transport.channelFactory();
+        channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine customEngine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(customEngine.getEnabledProtocols(), arrayContaining("TLSv1.2"));
+        assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
+    }
+
+    public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() {
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
+        Settings settings = Settings.builder()
+            .put("xpack.ssl.truststore.path",
+                getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"))
+            .setSecureSettings(secureSettings)
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("path.home", createTempDir())
+            .build();
+        env = TestEnvironment.newEnvironment(settings);
+        sslService = new SSLService(settings, env);
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> new SecurityNioHttpServerTransport(settings,
+                new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+                xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService));
+        assertThat(e.getMessage(), containsString("key must be provided"));
+    }
+
+    public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() {
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
+        Settings settings = Settings.builder()
+            .put("xpack.ssl.truststore.path",
+                getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"))
+            .setSecureSettings(secureSettings)
+            .put("path.home", createTempDir())
+            .build();
+        env = TestEnvironment.newEnvironment(settings);
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+    }
+}

From a612404b1f84ce92a24a69b9ad0b538f4c8f3428 Mon Sep 17 00:00:00 2001
From: Tim Brooks 
Date: Fri, 13 Jul 2018 23:37:15 -0600
Subject: [PATCH 022/260] Fix compile issues introduced by merge (#32058)

The build was broken due to some issues with the merging of #32018. A
method that was public went private before the PR was merged. That did
not cause a merge conflict (so the PR was merged successfully). But it
did cause the build to fail.
---
 .../security/transport/nio/SecurityNioHttpServerTransport.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
index 006c78b4ae0..50a78d93c71 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
@@ -59,7 +59,7 @@ public class SecurityNioHttpServerTransport extends NioHttpServerTransport {
         this.sslEnabled = HTTP_SSL_ENABLED.get(settings);
         this.sslService = sslService;
         if (sslEnabled) {
-            this.sslConfiguration = sslService.sslConfiguration(SSLService.getHttpTransportSSLSettings(settings), Settings.EMPTY);
+            this.sslConfiguration = sslService.getHttpTransportSSLConfiguration();
             if (sslService.isConfigurationValidForServerUsage(sslConfiguration) == false) {
                 throw new IllegalArgumentException("a key must be provided to run as a server. the key should be configured using the " +
                     "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting");

From ccf61264101437c1d188c6dec24fc9348dfe0a63 Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Sat, 14 Jul 2018 09:03:35 +0200
Subject: [PATCH 023/260] SCRIPTING: Remove unused
 MultiSearchTemplateRequestBuilder (#32049)

* Ever since 46e8d97813addd8c57fa54d2c700d26a171f2dbb this class is unused
---
 .../MultiSearchTemplateRequestBuilder.java    | 65 -------------------
 1 file changed, 65 deletions(-)
 delete mode 100644 modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java

diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java
deleted file mode 100644
index c4dac0dd88e..00000000000
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.script.mustache;
-
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.ElasticsearchClient;
-
-public class MultiSearchTemplateRequestBuilder
-        extends ActionRequestBuilder {
-
-    protected MultiSearchTemplateRequestBuilder(ElasticsearchClient client, MultiSearchTemplateAction action) {
-        super(client, action, new MultiSearchTemplateRequest());
-    }
-
-    public MultiSearchTemplateRequestBuilder add(SearchTemplateRequest request) {
-        if (request.getRequest().indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed()
-                && request().indicesOptions() != IndicesOptions.strictExpandOpenAndForbidClosed()) {
-            request.getRequest().indicesOptions(request().indicesOptions());
-        }
-
-        super.request.add(request);
-        return this;
-    }
-
-    public MultiSearchTemplateRequestBuilder add(SearchTemplateRequestBuilder request) {
-        if (request.request().getRequest().indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed()
-                && request().indicesOptions() != IndicesOptions.strictExpandOpenAndForbidClosed()) {
-            request.request().getRequest().indicesOptions(request().indicesOptions());
-        }
-
-        super.request.add(request);
-        return this;
-    }
-
-    public MultiSearchTemplateRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
-        request().indicesOptions(indicesOptions);
-        return this;
-    }
-
-    /**
-     * Sets how many search requests specified in this multi search requests are allowed to be ran concurrently.
-     */
-    public MultiSearchTemplateRequestBuilder setMaxConcurrentSearchRequests(int maxConcurrentSearchRequests) {
-        request().maxConcurrentSearchRequests(maxConcurrentSearchRequests);
-        return this;
-    }
-}

From b65c586cef0c8b46aaab26bc3ea9ef81c7653aa9 Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Sat, 14 Jul 2018 13:37:59 +0200
Subject: [PATCH 024/260] Cleanup Duplication in `PainlessScriptEngine`
 (#31991)

* Cleanup Duplication in `PainlessScriptEngine`
* Extract duplicate building of compiler settings to method
* Remove dead method params + dead constant in `ScriptProcessor`
---
 .../ingest/common/ScriptProcessor.java        |  3 -
 .../painless/PainlessScriptEngine.java        | 85 ++++++-------------
 2 files changed, 26 insertions(+), 62 deletions(-)

diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
index 74c68fd5c26..169b2ab646a 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.ingest.common;
 
-import com.fasterxml.jackson.core.JsonFactory;
-
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -48,7 +46,6 @@ import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationExcept
 public final class ScriptProcessor extends AbstractProcessor {
 
     public static final String TYPE = "script";
-    private static final JsonFactory JSON_FACTORY = new JsonFactory();
 
     private final Script script;
     private final ScriptService scriptService;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
index ae1944c9bd3..4560fd85a65 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
@@ -366,44 +366,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
     }
 
     Object compile(Compiler compiler, String scriptName, String source, Map params, Object... args) {
-        final CompilerSettings compilerSettings;
-
-        if (params.isEmpty()) {
-            // Use the default settings.
-            compilerSettings = defaultCompilerSettings;
-        } else {
-            // Use custom settings specified by params.
-            compilerSettings = new CompilerSettings();
-
-            // Except regexes enabled - this is a node level setting and can't be changed in the request.
-            compilerSettings.setRegexesEnabled(defaultCompilerSettings.areRegexesEnabled());
-
-            Map copy = new HashMap<>(params);
-
-            String value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER);
-            if (value != null) {
-                compilerSettings.setMaxLoopCounter(Integer.parseInt(value));
-            }
-
-            value = copy.remove(CompilerSettings.PICKY);
-            if (value != null) {
-                compilerSettings.setPicky(Boolean.parseBoolean(value));
-            }
-
-            value = copy.remove(CompilerSettings.INITIAL_CALL_SITE_DEPTH);
-            if (value != null) {
-                compilerSettings.setInitialCallSiteDepth(Integer.parseInt(value));
-            }
-
-            value = copy.remove(CompilerSettings.REGEX_ENABLED.getKey());
-            if (value != null) {
-                throw new IllegalArgumentException("[painless.regex.enabled] can only be set on node startup.");
-            }
-
-            if (!copy.isEmpty()) {
-                throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
-            }
-        }
+        final CompilerSettings compilerSettings = buildCompilerSettings(params);
 
         // Check we ourselves are not being called by unprivileged code.
         SpecialPermission.check();
@@ -434,14 +397,33 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
             }, COMPILATION_CONTEXT);
         // Note that it is safe to catch any of the following errors since Painless is stateless.
         } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
-            throw convertToScriptException(scriptName == null ? source : scriptName, source, e);
+            throw convertToScriptException(source, e);
         }
     }
 
     void compile(Compiler compiler, Loader loader, MainMethodReserved reserved,
                  String scriptName, String source, Map params) {
-        final CompilerSettings compilerSettings;
+        final CompilerSettings compilerSettings = buildCompilerSettings(params);
 
+        try {
+            // Drop all permissions to actually compile the code itself.
+            AccessController.doPrivileged(new PrivilegedAction() {
+                @Override
+                public Void run() {
+                    String name = scriptName == null ? source : scriptName;
+                    compiler.compile(loader, reserved, name, source, compilerSettings);
+
+                    return null;
+                }
+            }, COMPILATION_CONTEXT);
+            // Note that it is safe to catch any of the following errors since Painless is stateless.
+        } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
+            throw convertToScriptException(source, e);
+        }
+    }
+
+    private CompilerSettings buildCompilerSettings(Map params) {
+        CompilerSettings compilerSettings;
         if (params.isEmpty()) {
             // Use the default settings.
             compilerSettings = defaultCompilerSettings;
@@ -478,25 +460,10 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
                 throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
             }
         }
-
-        try {
-            // Drop all permissions to actually compile the code itself.
-            AccessController.doPrivileged(new PrivilegedAction() {
-                @Override
-                public Void run() {
-                    String name = scriptName == null ? source : scriptName;
-                    compiler.compile(loader, reserved, name, source, compilerSettings);
-
-                    return null;
-                }
-            }, COMPILATION_CONTEXT);
-            // Note that it is safe to catch any of the following errors since Painless is stateless.
-        } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
-            throw convertToScriptException(scriptName == null ? source : scriptName, source, e);
-        }
+        return compilerSettings;
     }
 
-    private ScriptException convertToScriptException(String scriptName, String scriptSource, Throwable t) {
+    private ScriptException convertToScriptException(String scriptSource, Throwable t) {
         // create a script stack: this is just the script portion
         List scriptStack = new ArrayList<>();
         for (StackTraceElement element : t.getStackTrace()) {
@@ -507,7 +474,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
                     scriptStack.add("<<< unknown portion of script >>>");
                 } else {
                     offset--; // offset is 1 based, line numbers must be!
-                    int startOffset = getPreviousStatement(scriptSource, offset);
+                    int startOffset = getPreviousStatement(offset);
                     int endOffset = getNextStatement(scriptSource, offset);
                     StringBuilder snippet = new StringBuilder();
                     if (startOffset > 0) {
@@ -535,7 +502,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
     }
 
     // very simple heuristic: +/- 25 chars. can be improved later.
-    private int getPreviousStatement(String scriptSource, int offset) {
+    private int getPreviousStatement(int offset) {
         return Math.max(0, offset - 25);
     }
 

From edbea73f24b8f85b05a4f6af1a8d1c7b3b63796f Mon Sep 17 00:00:00 2001
From: Tim Vernum 
Date: Mon, 16 Jul 2018 15:43:41 +1000
Subject: [PATCH 025/260] Fix broken OpenLDAP Vagrant QA test

This was broken due to c662565 but the problem didn't get detected as
CI builds typically don't run vagrant tests
---
 .../xpack/security/authc/ldap/LdapTestUtils.java   |  2 +-
 .../java/org/elasticsearch/test/OpenLdapTests.java | 14 ++++++++++----
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java
index 9a9368c25e1..8bdfd02d2fc 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java
@@ -62,7 +62,7 @@ public class LdapTestUtils {
 
         final SSLConfiguration sslConfiguration;
         if (useGlobalSSL) {
-            sslConfiguration = sslService.getSSLConfiguration("_global");
+            sslConfiguration = sslService.getSSLConfiguration("xpack.ssl");
         } else {
             sslConfiguration = sslService.getSSLConfiguration("xpack.security.authc.realms.foo.ssl");
         }
diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java
index c6d541b8064..f96823df019 100644
--- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java
+++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java
@@ -104,7 +104,13 @@ public class OpenLdapTests extends ESTestCase {
             builder.put("xpack.security.authc.realms." + REALM_NAME + ".ssl.truststore.path", truststore);
             mockSecureSettings.setString("xpack.security.authc.realms." + REALM_NAME + ".ssl.truststore.secure_password", "changeit");
             builder.put("xpack.security.authc.realms." + REALM_NAME + ".ssl.verification_mode", VerificationMode.CERTIFICATE);
+
+            // If not using global ssl, need to set the truststore for the "full verification" realm
+            builder.put("xpack.security.authc.realms.vmode_full.ssl.truststore.path", truststore);
+            mockSecureSettings.setString("xpack.security.authc.realms.vmode_full.ssl.truststore.secure_password", "changeit");
         }
+        builder.put("xpack.security.authc.realms.vmode_full.ssl.verification_mode", VerificationMode.FULL);
+
         globalSettings = builder.setSecureSettings(mockSecureSettings).build();
         Environment environment = TestEnvironment.newEnvironment(globalSettings);
         sslService = new SSLService(globalSettings, environment);
@@ -188,10 +194,10 @@ public class OpenLdapTests extends ESTestCase {
         Settings settings = Settings.builder()
             // The certificate used in the vagrant box is valid for "localhost", but not for "127.0.0.1"
             .put(buildLdapSettings(OPEN_LDAP_IP_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
-            .put("ssl.verification_mode", VerificationMode.FULL)
             .build();
 
-        RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
+        // Pick up the "full" verification mode config
+        RealmConfig config = new RealmConfig("vmode_full", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
             new ThreadContext(Settings.EMPTY));
         LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
 
@@ -211,10 +217,10 @@ public class OpenLdapTests extends ESTestCase {
         Settings settings = Settings.builder()
             // The certificate used in the vagrant box is valid for "localhost" (but not for "127.0.0.1")
             .put(buildLdapSettings(OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
-            .put("ssl.verification_mode", VerificationMode.FULL)
             .build();
 
-        RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
+        // Pick up the "full" verification mode config
+        RealmConfig config = new RealmConfig("vmode_full", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
             new ThreadContext(Settings.EMPTY));
         LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
 

From 016e8760f0b44e816bae541a822d5d2fbb8f3021 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer 
Date: Mon, 16 Jul 2018 10:40:36 +0200
Subject: [PATCH 026/260] Turn off real-mem breaker in single node tests

With this commit we disable the real-memory circuit breaker in tests
that inherit from `ESSingleNodeTestCase`. As this breaker is based on
real memory usage over which we have no (full) control in tests and
their purpose is also not to test the circuit breaker, we use the
deterministic circuit breaker implementation that only accounts for
explicitly reserved memory.

Closes #32047
Relates #32071
---
 .../java/org/elasticsearch/test/ESSingleNodeTestCase.java     | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
index a1b8f44a923..9633f56dea9 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
@@ -42,6 +42,7 @@ import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
 import org.elasticsearch.node.MockNode;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.NodeValidationException;
@@ -184,6 +185,9 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
             .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b")
             .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b")
             .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b")
+            // turning on the real memory circuit breaker leads to spurious test failures. As we have no full control over heap usage, we
+            // turn it off for these tests.
+            .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
             .put(nodeSettings()) // allow test cases to provide their own settings or override these
             .build();
         Collection> plugins = getPlugins();

From e0cfa1689c17a660e0e3e957262e642ac760a7d8 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer 
Date: Mon, 16 Jul 2018 10:44:04 +0200
Subject: [PATCH 027/260] Turn off real-mem breaker in REST tests

With this commit we disable the real-memory circuit breaker in REST
tests as this breaker is based on real memory usage over which we have
no (full) control in tests and the REST client is not yet ready to retry
on circuit breaker exceptions.

This is only meant as a temporary measure to avoid spurious test
failures while we ensure that the REST client can handle those
situations appropriately.

Closes #32050
Relates #31767
Relates #31986
Relates #32074
---
 .../elasticsearch/gradle/test/ClusterFormationTasks.groovy  | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
index be0fb3a07c6..0349130076c 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
@@ -331,6 +331,12 @@ class ClusterFormationTasks {
         }
         // increase script compilation limit since tests can rapid-fire script compilations
         esConfig['script.max_compilations_rate'] = '2048/1m'
+        // Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
+        // over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
+        // can retry on circuit breaking exceptions, we can revert again to the default configuration.
+        if (node.nodeVersion.major >= 7) {
+            esConfig['indices.breaker.total.use_real_memory'] = false
+        }
         esConfig.putAll(node.config.settings)
 
         Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)

From a14db2f9d6df79d5b38befc2657a689ce6d90cbf Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 10:53:51 +0200
Subject: [PATCH 028/260] [Test] Mute MlJobIT#testDeleteJobAfterMissingAliases

Relates #32034
---
 .../java/org/elasticsearch/xpack/ml/integration/MlJobIT.java     | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java
index 6713e66692d..7820cbc06f5 100644
--- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java
+++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java
@@ -438,6 +438,7 @@ public class MlJobIT extends ESRestTestCase {
                 client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034")
     public void testDeleteJobAfterMissingAliases() throws Exception {
         String jobId = "delete-job-after-missing-alias-job";
         String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);

From ca4c4f736ae1d73d503e3e5b530e025117d15e94 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?= 
Date: Mon, 16 Jul 2018 10:54:23 +0200
Subject: [PATCH 029/260] Remove unused params from SSource and Walker (#31935)

The "source" field in SSource seems unused. If removed, it can also be removed
from the ctor, which in turn makes it possible to delete the sourceText in the
Walker class.
---
 .../org/elasticsearch/painless/antlr/Walker.java    |  6 ++----
 .../org/elasticsearch/painless/node/SSource.java    | 13 +++++--------
 2 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
index e2742ffb993..6c8d3a62e06 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
@@ -29,7 +29,6 @@ import org.antlr.v4.runtime.Recognizer;
 import org.antlr.v4.runtime.atn.PredictionMode;
 import org.antlr.v4.runtime.tree.TerminalNode;
 import org.elasticsearch.painless.CompilerSettings;
-import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Location;
 import org.elasticsearch.painless.Operation;
@@ -107,6 +106,7 @@ import org.elasticsearch.painless.antlr.PainlessParser.TrueContext;
 import org.elasticsearch.painless.antlr.PainlessParser.TryContext;
 import org.elasticsearch.painless.antlr.PainlessParser.VariableContext;
 import org.elasticsearch.painless.antlr.PainlessParser.WhileContext;
+import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.node.AExpression;
 import org.elasticsearch.painless.node.ANode;
 import org.elasticsearch.painless.node.AStatement;
@@ -184,7 +184,6 @@ public final class Walker extends PainlessParserBaseVisitor {
     private final CompilerSettings settings;
     private final Printer debugStream;
     private final String sourceName;
-    private final String sourceText;
     private final PainlessLookup painlessLookup;
 
     private final Deque reserved = new ArrayDeque<>();
@@ -198,7 +197,6 @@ public final class Walker extends PainlessParserBaseVisitor {
         this.debugStream = debugStream;
         this.settings = settings;
         this.sourceName = Location.computeSourceName(sourceName);
-        this.sourceText = sourceText;
         this.globals = new Globals(new BitSet(sourceText.length()));
         this.painlessLookup = painlessLookup;
         this.source = (SSource)visit(buildAntlrTree(sourceText));
@@ -267,7 +265,7 @@ public final class Walker extends PainlessParserBaseVisitor {
             statements.add((AStatement)visit(ctx.dstatement()));
         }
 
-        return new SSource(scriptClassInfo, settings, sourceName, sourceText, debugStream, (MainMethodReserved)reserved.pop(),
+        return new SSource(scriptClassInfo, settings, sourceName, debugStream, (MainMethodReserved)reserved.pop(),
                            location(ctx), functions, globals, statements);
     }
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
index 4781457a57d..cd473e2c84e 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
@@ -21,9 +21,6 @@ package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.CompilerSettings;
 import org.elasticsearch.painless.Constant;
-import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessMethod;
-import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;
@@ -32,6 +29,9 @@ import org.elasticsearch.painless.MethodWriter;
 import org.elasticsearch.painless.ScriptClassInfo;
 import org.elasticsearch.painless.SimpleChecksAdapter;
 import org.elasticsearch.painless.WriterConstants;
+import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessMethod;
+import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.node.SFunction.FunctionReserved;
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.ClassWriter;
@@ -130,7 +130,6 @@ public final class SSource extends AStatement {
     private final ScriptClassInfo scriptClassInfo;
     private final CompilerSettings settings;
     private final String name;
-    private final String source;
     private final Printer debugStream;
     private final MainMethodReserved reserved;
     private final List functions;
@@ -141,14 +140,12 @@ public final class SSource extends AStatement {
     private final List getMethods;
     private byte[] bytes;
 
-    public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, String source, Printer debugStream,
-                   MainMethodReserved reserved, Location location,
-                   List functions, Globals globals, List statements) {
+    public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, Printer debugStream,
+            MainMethodReserved reserved, Location location, List functions, Globals globals, List statements) {
         super(location);
         this.scriptClassInfo = Objects.requireNonNull(scriptClassInfo);
         this.settings = Objects.requireNonNull(settings);
         this.name = Objects.requireNonNull(name);
-        this.source = Objects.requireNonNull(source);
         this.debugStream = debugStream;
         this.reserved = Objects.requireNonNull(reserved);
         // process any synthetic functions generated by walker (because right now, thats still easy)

From 3587d8872e9d2b43c6b66f7d6ec7aafe6a7c1df0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?= 
Date: Mon, 16 Jul 2018 11:22:42 +0200
Subject: [PATCH 030/260] [Tests] Fix failure due to changed exception message
 (#32036)

Java 11 seems to get more verbose on the ClassCastException we check for in
SearchDocumentationIT. This changes the test from asserting the exact exception
message to only checking the two classes involved are part of the message.

Closes #32029
---
 .../client/documentation/SearchDocumentationIT.java         | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
index 6d00e5d8d03..26bb4682fd9 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
@@ -295,7 +295,6 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
     }
 
     @SuppressWarnings({ "unused" })
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32029")
     public void testSearchRequestAggregations() throws IOException {
         RestHighLevelClient client = highLevelClient();
         {
@@ -338,8 +337,9 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
                     Range range = aggregations.get("by_company"); // <1>
                     // end::search-request-aggregations-get-wrongCast
                 } catch (ClassCastException ex) {
-                    assertEquals("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms"
-                            + " cannot be cast to org.elasticsearch.search.aggregations.bucket.range.Range", ex.getMessage());
+                    String message = ex.getMessage();
+                    assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms"));
+                    assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.range.Range"));
                 }
                 assertEquals(3, elasticBucket.getDocCount());
                 assertEquals(30, avg, 0.0);

From fa59bb10999b20d487ff12a890ee8cfa0f414826 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 11:59:59 +0200
Subject: [PATCH 031/260] Fix BWC check after backport

Relates #31808
---
 .../java/org/elasticsearch/index/query/InnerHitBuilder.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
index 6bdc55d31cd..8b2db374c8d 100644
--- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
+++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
@@ -199,7 +199,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
             boolean hasChildren = in.readBoolean();
             assert hasChildren == false;
         }
-        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
             this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new);
         }
     }
@@ -247,7 +247,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
             }
         }
         out.writeOptionalWriteable(highlightBuilder);
-        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
             out.writeOptionalWriteable(innerCollapseBuilder);
         }
     }

From 44f0c1df39ebc34e9613e3e5bcad78a3308a9068 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 12:03:28 +0200
Subject: [PATCH 032/260] Unmute field collapsing rest tests

BWC tests can run now that master and 6x branch are aligned.
Closes #32055
---
 .../test/search.inner_hits/10_basic.yml         |  9 ++++-----
 .../test/search/110_field_collapsing.yml        | 17 +++--------------
 2 files changed, 7 insertions(+), 19 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
index 8f162ae2eb2..884a50507c7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
@@ -13,9 +13,8 @@ setup:
 ---
 "Nested inner hits":
     - skip:
-        version: "all"
-        reason: "https://github.com/elastic/elasticsearch/issues/32055"
-
+        version: " - 6.1.99"
+        reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
     - do:
         index:
           index: test
@@ -46,8 +45,8 @@ setup:
 "Nested doc version and seqIDs":
 
     - skip:
-        version: "all"
-        reason: "https://github.com/elastic/elasticsearch/issues/32055"
+        version: " - 6.3.99"
+        reason:  "object notation for docvalue_fields was introduced in 6.4"
 
     - do:
         index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
index 39597b1fbbe..2dfd868d66b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -107,9 +107,6 @@ setup:
 
 ---
 "field collapsing and inner_hits":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -149,9 +146,6 @@ setup:
 
 ---
 "field collapsing, inner_hits and maxConcurrentGroupRequests":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -232,9 +226,6 @@ setup:
 
 ---
 "no hits and inner_hits":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -249,9 +240,6 @@ setup:
 
 ---
 "field collapsing and multiple inner_hits":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -304,9 +292,10 @@ setup:
 
 ---
 "field collapsing, inner_hits and version":
+
   - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
+      version: " - 6.1.0"
+      reason:  "bug fixed in 6.1.1"
 
   - do:
       search:

From 1fef139c11065aee746a18f4d76ca65eb369ece3 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer 
Date: Mon, 16 Jul 2018 13:50:17 +0200
Subject: [PATCH 033/260] Ensure only parent breaker trips in unit test

With this commit we raise the limit of the child circuit breaker used in
the unit test for the circuit breaker service so it is high enough to trip
only the parent circuit breaker. The previous limit was 300 bytes but
theoretically (considering overhead) we could reach 346 bytes. Thus any
value larger than 300 bytes could trip the child circuit breaker leading
to spurious failures.

Relates #31767
---
 .../indices/breaker/HierarchyCircuitBreakerServiceTests.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
index 00bd15d244f..a73cf8630fe 100644
--- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
@@ -206,7 +206,7 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase {
         Settings clusterSettings = Settings.builder()
             .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), Boolean.TRUE)
             .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "200b")
-            .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "300b")
+            .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "350b")
             .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 2)
             .build();
 

From a3b608d616fe8f9b8bb10bb60a97a9f39ec53a6c Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 15:25:45 +0200
Subject: [PATCH 034/260] [Rollup] Fix duplicate field names in test (#32075)

This commit ensures that random field names do not clash with the explicit field names
set by the tests.

Closes #32067
---
 .../elasticsearch/xpack/core/rollup/ConfigTestHelpers.java  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
index 3d82ac118f5..3e4e4a84d2f 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
@@ -94,7 +94,7 @@ public class ConfigTestHelpers {
         if (ESTestCase.randomBoolean()) {
             dateHistoBuilder.setDelay(new DateHistogramInterval(randomPositiveTimeValue()));
         }
-        dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(1, 10 ));
+        dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(5, 10));
         return dateHistoBuilder;
     }
 
@@ -112,8 +112,8 @@ public class ConfigTestHelpers {
     }
 
     public static  List getFields() {
-        return IntStream.range(0, ESTestCase.randomIntBetween(1,10))
-                .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(1,10))
+        return IntStream.range(0, ESTestCase.randomIntBetween(1, 10))
+                .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10))
                 .collect(Collectors.toList());
     }
 

From ef7ccd1c074b7fb9a2d1e388d517e6d20c131903 Mon Sep 17 00:00:00 2001
From: Ioannis Kakavas 
Date: Mon, 16 Jul 2018 16:41:56 +0300
Subject: [PATCH 035/260] [TEST] Consistent algorithm usage (#32077)

Ensure that the same algorithm is used for settings and
change password requests for consistency, even if we
do not expect to reach the code where the algorithm is
checked for now.
Completes a7eaa409e804f218aa06fd02d9166b9a5998b48a
---
 .../TransportChangePasswordActionTests.java     | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java
index 516b33cbacc..aabaa40381f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java
@@ -49,18 +49,16 @@ public class TransportChangePasswordActionTests extends ESTestCase {
 
     public void testAnonymousUser() {
         final String hashingAlgorithm = randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt", "bcrypt9");
-        Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build();
+        Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser")
+            .put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
         AnonymousUser anonymousUser = new AnonymousUser(settings);
         NativeUsersStore usersStore = mock(NativeUsersStore.class);
-        Settings passwordHashingSettings = Settings.builder().
-            put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
-        TransportService transportService = new TransportService(passwordHashingSettings, mock(Transport.class), null,
+        TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
         TransportChangePasswordAction action = new TransportChangePasswordAction(settings, transportService,
             mock(ActionFilters.class), usersStore);
-
-        ChangePasswordRequest request = new ChangePasswordRequest();
         // Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
+        ChangePasswordRequest request = new ChangePasswordRequest();
         request.username(anonymousUser.principal());
         request.passwordHash(Hasher.resolve(hashingAlgorithm).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
 
@@ -89,14 +87,13 @@ public class TransportChangePasswordActionTests extends ESTestCase {
         NativeUsersStore usersStore = mock(NativeUsersStore.class);
         Settings passwordHashingSettings = Settings.builder().
             put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
-        TransportService transportService = new TransportService(passwordHashingSettings, mock(Transport.class), null,
+        TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
-        TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService,
+        TransportChangePasswordAction action = new TransportChangePasswordAction(passwordHashingSettings, transportService,
             mock(ActionFilters.class), usersStore);
-
+        // Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
         ChangePasswordRequest request = new ChangePasswordRequest();
         request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal()));
-        // Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
         request.passwordHash(Hasher.resolve(hashingAlgorithm).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
 
         final AtomicReference throwableRef = new AtomicReference<>();

From 59191b4998afb7dd7093e44d14f06f4f3e7c4740 Mon Sep 17 00:00:00 2001
From: Zachary Tong 
Date: Mon, 16 Jul 2018 10:47:46 -0400
Subject: [PATCH 036/260] [Rollup] Replace RollupIT with a ESRestTestCase
 version (#31977)

The old RollupIT was a node IT, and flaky for a number of reasons.
This new version is an ESRestTestCase and should be a little more robust.

This was added to the multi-node QA tests as that seemed like the most
appropriate location.  It didn't seem necessary to create a whole new
QA module.

Note: The only test that was ported was the "Big" test for validating
a larger dataset.  The rest of the tests are represented in existing
yaml tests.

Closes #31258
Closes #30232
Related to #30290
---
 x-pack/plugin/rollup/build.gradle             |  28 -
 .../elasticsearch/xpack/rollup/RollupIT.java  | 498 ------------------
 .../elasticsearch/multi_node/RollupIT.java    | 326 ++++++++++++
 3 files changed, 326 insertions(+), 526 deletions(-)
 delete mode 100644 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java
 create mode 100644 x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java

diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle
index 18ef7abee5c..ff9c30ed9a9 100644
--- a/x-pack/plugin/rollup/build.gradle
+++ b/x-pack/plugin/rollup/build.gradle
@@ -1,6 +1,3 @@
-import com.carrotsearch.gradle.junit4.RandomizedTestingTask
-import org.elasticsearch.gradle.BuildPlugin
-
 evaluationDependsOn(xpackModule('core'))
 
 apply plugin: 'elasticsearch.esplugin'
@@ -23,33 +20,8 @@ dependencies {
     testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
 }
 
-dependencyLicenses {
-    ignoreSha 'x-pack-core'
-}
-
 run {
     plugin xpackModule('core')
 }
 
 integTest.enabled = false
-
-
-// Instead we create a separate task to run the
-// tests based on ESIntegTestCase
-task internalClusterTest(type: RandomizedTestingTask,
-        group: JavaBasePlugin.VERIFICATION_GROUP,
-        description: 'Multi-node tests',
-        dependsOn: test.dependsOn) {
-    configure(BuildPlugin.commonTestConfig(project))
-    classpath = project.test.classpath
-    testClassesDirs = project.test.testClassesDirs
-    include '**/*IT.class'
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
-}
-check.dependsOn internalClusterTest
-internalClusterTest.mustRunAfter test
-
-// also add an "alias" task to make typing on the command line easier task icTest {
-task icTest {
-    dependsOn internalClusterTest
-}
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java
deleted file mode 100644
index 157cd6a5b9d..00000000000
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java
+++ /dev/null
@@ -1,498 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.rollup;
-
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
-import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchAction;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.query.MatchAllQueryBuilder;
-import org.elasticsearch.license.LicenseService;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.aggregations.Aggregation;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.transport.Netty4Plugin;
-import org.elasticsearch.xpack.core.XPackSettings;
-import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction;
-import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
-import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig;
-import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
-import org.elasticsearch.xpack.core.rollup.job.IndexerState;
-import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
-import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
-import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus;
-import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-
-import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
-import static org.hamcrest.core.IsEqual.equalTo;
-
-@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
-public class RollupIT extends ESIntegTestCase {
-
-    private String taskId = "test-bigID";
-
-    @Override
-    protected boolean ignoreExternalCluster() {
-        return true;
-    }
-
-    @Override
-    protected Collection> nodePlugins() {
-        return Arrays.asList(LocalStateRollup.class, CommonAnalysisPlugin.class, Netty4Plugin.class);
-    }
-
-    @Override
-    protected Collection> transportClientPlugins() {
-        return nodePlugins();
-    }
-
-    @Override
-    protected Settings nodeSettings(int nodeOrdinal) {
-        Settings.Builder builder = Settings.builder();
-        builder.put(XPackSettings.ROLLUP_ENABLED.getKey(), true);
-        builder.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
-        builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
-        return builder.build();
-    }
-
-    @Override
-    protected Settings externalClusterClientSettings() {
-        return nodeSettings(0);
-    }
-
-    @Override
-    protected Settings transportClientSettings() {
-        return Settings.builder().put(super.transportClientSettings())
-                .put(XPackSettings.ROLLUP_ENABLED.getKey(), true)
-                .put(XPackSettings.SECURITY_ENABLED.getKey(), false)
-                .build();
-    }
-
-    @Before
-    public void createIndex() {
-        client().admin().indices().prepareCreate("test-1").addMapping("doc", "{\"doc\": {\"properties\": {" +
-                "\"date_histo\": {\"type\": \"date\"}, " +
-                "\"histo\": {\"type\": \"integer\"}, " +
-                "\"terms\": {\"type\": \"keyword\"}}}}", XContentType.JSON).get();
-        client().admin().cluster().prepareHealth("test-1").setWaitForYellowStatus().get();
-
-        BulkRequestBuilder bulk = client().prepareBulk();
-        Map source = new HashMap<>(3);
-        for (int i = 0; i < 20; i++) {
-            for (int j = 0; j < 20; j++) {
-                for (int k = 0; k < 20; k++) {
-                    source.put("date_histo", new DateTime().minusDays(i).toString());
-                    source.put("histo", Integer.toString(j * 100));
-                    source.put("terms", Integer.toString(k * 100));
-                    source.put("foo", k);
-                    bulk.add(new IndexRequest("test-1", "doc").source(source));
-                    source.clear();
-                }
-            }
-        }
-        bulk.get();
-        client().admin().indices().prepareRefresh("test-1").get();
-    }
-
-    public void testGetJob() throws ExecutionException, InterruptedException {
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("foo")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("date_histo");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-1");
-        config.setRollupIndex("rolled");
-        config.setId("testGet");
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(10);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testGet");
-        GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        assertThat(response.getJobs().size(), equalTo(1));
-        assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testGet"));
-    }
-
-    public void testIndexPattern() throws Exception {
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("foo")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("date_histo");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-*");
-        config.setId("testIndexPattern");
-        config.setRollupIndex("rolled");
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(10);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("testIndexPattern");
-        StartRollupJobAction.Response startResponse = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
-        Assert.assertThat(startResponse.isStarted(), equalTo(true));
-
-        // Make sure it started
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern");
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING));
-        }, 60, TimeUnit.SECONDS);
-
-        // And wait for it to finish
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern");
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null);
-        }, 60, TimeUnit.SECONDS);
-
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testIndexPattern");
-        GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        Assert.assertThat(response.getJobs().size(), equalTo(1));
-        Assert.assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testIndexPattern"));
-
-        GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices("rolled").get();
-        Assert.assertThat(getIndexResponse.indices().length, Matchers.greaterThan(0));
-    }
-
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30290")
-    public void testTwoJobsStartStopDeleteOne() throws Exception {
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("foo")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("date_histo");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-1");
-        config.setRollupIndex("rolled");
-        config.setId("job1");
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(10);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        RollupJobConfig.Builder config2 = new RollupJobConfig.Builder();
-        config2.setIndexPattern("test-1");
-        config2.setRollupIndex("rolled");
-        config2.setId("job2");
-        config2.setGroupConfig(groupConfig.build());
-        config2.setMetricsConfig(Collections.singletonList(metricConfig));
-        config2.setCron("* * * * * ? *");
-        config2.setPageSize(10);
-
-        PutRollupJobAction.Request request2 = new PutRollupJobAction.Request();
-        request2.setConfig(config2.build());
-        client().execute(PutRollupJobAction.INSTANCE, request2).get();
-
-        StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("job1");
-        StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
-        Assert.assertThat(response.isStarted(), equalTo(true));
-
-        // Make sure it started
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job1");
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING));
-        }, 60, TimeUnit.SECONDS);
-
-        //but not the other task
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job2");
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STOPPED));
-        }, 60, TimeUnit.SECONDS);
-
-        // Delete the task
-        DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request("job1");
-        DeleteRollupJobAction.Response deleteResponse = client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get();
-        Assert.assertTrue(deleteResponse.isAcknowledged());
-
-        // Make sure the first job's task is gone
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job1");
-            assertTrue(rollupJobStatus == null);
-        }, 60, TimeUnit.SECONDS);
-
-        // And that we don't see it in the GetJobs API
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("job1");
-        GetRollupJobsAction.Response getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        Assert.assertThat(getResponse.getJobs().size(), equalTo(0));
-
-        // But make sure the other job is still there
-        getRequest = new GetRollupJobsAction.Request("job2");
-        getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        Assert.assertThat(getResponse.getJobs().size(), equalTo(1));
-        Assert.assertThat(getResponse.getJobs().get(0).getJob().getId(), equalTo("job2"));
-
-        // and still STOPPED
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job2");
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STOPPED));
-        }, 60, TimeUnit.SECONDS);
-    }
-
-    public void testBig() throws Exception {
-
-        client().admin().indices().prepareCreate("test-big")
-                .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " +
-                    "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON)
-                .setSettings(Settings.builder()
-                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
-                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
-        client().admin().cluster().prepareHealth("test-big").setWaitForYellowStatus().get();
-
-        client().admin().indices().prepareCreate("test-verify")
-                .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " +
-                        "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON)
-                .setSettings(Settings.builder()
-                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
-                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
-        client().admin().cluster().prepareHealth("test-verify").setWaitForYellowStatus().get();
-
-        BulkRequestBuilder bulk = client().prepareBulk();
-        Map source = new HashMap<>(3);
-
-        int numDays = 90;
-        int numDocsPerDay = 100;
-
-        for (int i = 0; i < numDays; i++) {
-            DateTime ts = new DateTime().minusDays(i);
-            for (int j = 0; j < numDocsPerDay; j++) {
-
-                int value = ESTestCase.randomIntBetween(0,100);
-                source.put("timestamp", ts.toString());
-                source.put("thefield", value);
-                bulk.add(new IndexRequest("test-big", "test-big").source(source));
-                bulk.add(new IndexRequest("test-verify", "test-big").source(source));
-                source.clear();
-            }
-
-            bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-            bulk.get();
-            bulk = client().prepareBulk();
-            logger.info("Day: [" + i + "]: " + ts.toString() + " [" + ts.getMillis() + "]" );
-        }
-
-
-        client().admin().indices().prepareRefresh("test-big").get();
-        client().admin().indices().prepareRefresh("test-verify").get();
-
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("thefield")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("timestamp");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-big");
-        config.setRollupIndex("rolled");
-        config.setId(taskId);
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(1000);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request(taskId);
-        StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
-        Assert.assertThat(response.isStarted(), equalTo(true));
-
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId);
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            logger.error("state: [" + state + "]");
-            assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null);
-        }, 60, TimeUnit.SECONDS);
-
-        RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId);
-        if (rollupJobStatus == null) {
-            Assert.fail("rollup job status should not be null");
-        }
-
-        client().admin().indices().prepareRefresh("rolled").get();
-
-        SearchResponse count = client().prepareSearch("rolled").setSize(10).get();
-        // total document is numDays minus 1 because we don't build rollup for
-        // buckets that are not full (bucket for the current day).
-        Assert.assertThat(count.getHits().totalHits, equalTo(Long.valueOf(numDays-1)));
-
-        if (ESTestCase.randomBoolean()) {
-            client().admin().indices().prepareDelete("test-big").get();
-            client().admin().indices().prepareRefresh().get();
-        }
-
-        // Execute the rollup search
-        SearchRequest rollupRequest = new SearchRequest("rolled")
-                .source(new SearchSourceBuilder()
-                        .aggregation(dateHistogram("timestamp")
-                                .interval(1000*86400)
-                                .field("timestamp"))
-                        .size(0));
-        SearchResponse searchResponse = client().execute(RollupSearchAction.INSTANCE, rollupRequest).get();
-        Assert.assertNotNull(searchResponse);
-
-        // And a regular search against the verification index
-        SearchRequest verifyRequest = new SearchRequest("test-verify")
-                .source(new SearchSourceBuilder()
-                        .aggregation(dateHistogram("timestamp")
-                                .interval(1000*86400)
-                                .field("timestamp"))
-                        .size(0));
-        SearchResponse verifyResponse = client().execute(SearchAction.INSTANCE, verifyRequest).get();
-
-        Map rollupAggs = searchResponse.getAggregations().asMap();
-
-        for (Aggregation agg : verifyResponse.getAggregations().asList()) {
-            Aggregation rollupAgg = rollupAggs.get(agg.getName());
-
-            Assert.assertNotNull(rollupAgg);
-            Assert.assertThat(rollupAgg.getType(), equalTo(agg.getType()));
-            verifyAgg((InternalDateHistogram)agg, (InternalDateHistogram)rollupAgg);
-        }
-
-        // And a quick sanity check for doc type
-        SearchRequest rollupRawRequest = new SearchRequest("rolled")
-                .source(new SearchSourceBuilder().query(new MatchAllQueryBuilder())
-                        .size(1));
-        SearchResponse searchRawResponse = client().execute(SearchAction.INSTANCE, rollupRawRequest).get();
-        Assert.assertNotNull(searchRawResponse);
-        assertThat(searchRawResponse.getHits().getAt(0).getType(), equalTo("_doc"));
-    }
-
-    private void verifyAgg(InternalDateHistogram verify, InternalDateHistogram rollup) {
-        for (int i = 0; i < rollup.getBuckets().size(); i++) {
-            InternalDateHistogram.Bucket verifyBucket = verify.getBuckets().get(i);
-            InternalDateHistogram.Bucket rollupBucket = rollup.getBuckets().get(i);
-            Assert.assertThat(rollupBucket.getDocCount(), equalTo(verifyBucket.getDocCount()));
-            Assert.assertThat(((DateTime)rollupBucket.getKey()).getMillis(), equalTo(((DateTime)verifyBucket.getKey()).getMillis()));
-            Assert.assertTrue(rollupBucket.getAggregations().equals(verifyBucket.getAggregations()));
-        }
-    }
-
-    private RollupJobStatus getRollupJobStatus(final String taskId) {
-        final GetRollupJobsAction.Request request = new GetRollupJobsAction.Request(taskId);
-        final GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, request).actionGet();
-
-        if (response.getJobs() != null && response.getJobs().isEmpty() == false) {
-            assertThat("Expect 1 rollup job with id " + taskId, response.getJobs().size(), equalTo(1));
-            return response.getJobs().iterator().next().getStatus();
-        }
-        return null;
-    }
-
-    @After
-    public void cleanup() throws ExecutionException, InterruptedException {
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("_all");
-        GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-
-        for (GetRollupJobsAction.JobWrapper job : response.getJobs()) {
-            StopRollupJobAction.Request stopRequest = new StopRollupJobAction.Request(job.getJob().getId());
-            try {
-                client().execute(StopRollupJobAction.INSTANCE, stopRequest).get();
-            } catch (ElasticsearchException e) {
-                //
-            }
-
-            DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request(job.getJob().getId());
-            client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get();
-        }
-    }
-}
diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java
new file mode 100644
index 00000000000..b0142ae1418
--- /dev/null
+++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.multi_node;
+
+import org.apache.http.HttpStatus;
+import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.xpack.core.rollup.job.RollupJob;
+import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath;
+import org.junit.After;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.isOneOf;
+
+public class RollupIT extends ESRestTestCase {
+
+    @Override
+    protected Settings restClientSettings() {
+        return getClientSettings("super-user", "x-pack-super-password");
+    }
+
+    @Override
+    protected Settings restAdminSettings() {
+        return getClientSettings("super-user", "x-pack-super-password");
+    }
+
+    private Settings getClientSettings(final String username, final String password) {
+        final String token = basicAuthHeaderValue(username, new SecureString(password.toCharArray()));
+        return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
+    }
+
+    static Map toMap(Response response) throws IOException {
+        return toMap(EntityUtils.toString(response.getEntity()));
+    }
+
+    static Map toMap(String response) throws IOException {
+        return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
+    }
+
+    @After
+    public void clearRollupMetadata() throws Exception {
+        deleteAllJobs();
+        waitForPendingTasks();
+        // indices will be deleted by the ESRestTestCase class
+    }
+
+    public void testBigRollup() throws Exception {
+        final int numDocs = 200;
+
+        // index documents for the rollup job
+        final StringBuilder bulk = new StringBuilder();
+        for (int i = 0; i < numDocs; i++) {
+            bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
+            ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60*i)), ZoneId.of("UTC"));
+            String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
+            bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
+        }
+        bulk.append("\r\n");
+
+        final Request bulkRequest = new Request("POST", "/_bulk");
+        bulkRequest.addParameter("refresh", "true");
+        bulkRequest.setJsonEntity(bulk.toString());
+        client().performRequest(bulkRequest);
+        // create the rollup job
+        final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test");
+        createRollupJobRequest.setJsonEntity("{"
+            + "\"index_pattern\":\"rollup-*\","
+            + "\"rollup_index\":\"results-rollup\","
+            + "\"cron\":\"*/1 * * * * ?\","             // fast cron and big page size so test runs quickly
+            + "\"page_size\":20,"
+            + "\"groups\":{"
+            + "    \"date_histogram\":{"
+            + "        \"field\":\"timestamp\","
+            + "        \"interval\":\"5m\""
+            + "      }"
+            + "},"
+            + "\"metrics\":["
+            + "    {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}"
+            + "]"
+            + "}");
+
+        Map createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest));
+        assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));
+
+        // start the rollup job
+        final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start");
+        Map startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest));
+        assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));
+
+        assertRollUpJob("rollup-job-test");
+
+        // Wait for the job to finish, by watching how many rollup docs we've indexed
+        assertBusy(() -> {
+            final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test");
+            Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
+            assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+
+            Map job = getJob(getRollupJobResponse, "rollup-job-test");
+            if (job != null) {
+                assertThat(ObjectPath.eval("status.job_state", job), equalTo("started"));
+                assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41));
+            }
+        }, 30L, TimeUnit.SECONDS);
+
+        // Refresh the rollup index to make sure all newly indexed docs are searchable
+        final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh");
+        toMap(client().performRequest(refreshRollupIndex));
+
+        String jsonRequestBody = "{\n" +
+            "  \"size\": 0,\n" +
+            "  \"query\": {\n" +
+            "    \"match_all\": {}\n" +
+            "  },\n" +
+            "  \"aggs\": {\n" +
+            "    \"date_histo\": {\n" +
+            "      \"date_histogram\": {\n" +
+            "        \"field\": \"timestamp\",\n" +
+            "        \"interval\": \"1h\"\n" +
+            "      },\n" +
+            "      \"aggs\": {\n" +
+            "        \"the_max\": {\n" +
+            "          \"max\": {\n" +
+            "            \"field\": \"value\"\n" +
+            "          }\n" +
+            "        }\n" +
+            "      }\n" +
+            "    }\n" +
+            "  }\n" +
+            "}";
+
+        Request request = new Request("GET", "rollup-docs/_search");
+        request.setJsonEntity(jsonRequestBody);
+        Response liveResponse = client().performRequest(request);
+        Map liveBody = toMap(liveResponse);
+
+        request = new Request("GET", "results-rollup/_rollup_search");
+        request.setJsonEntity(jsonRequestBody);
+        Response rollupResponse = client().performRequest(request);
+        Map rollupBody = toMap(rollupResponse);
+
+        // Do the live agg results match the rollup agg results?
+        assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
+            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody)));
+
+        request = new Request("GET", "rollup-docs/_rollup_search");
+        request.setJsonEntity(jsonRequestBody);
+        Response liveRollupResponse = client().performRequest(request);
+        Map liveRollupBody = toMap(liveRollupResponse);
+
+        // Does searching the live index via rollup_search work match the live search?
+        assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
+            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody)));
+
+    }
+
+    @SuppressWarnings("unchecked")
+    private void assertRollUpJob(final String rollupJob) throws Exception {
+        String[] states = new String[]{"indexing", "started"};
+        waitForRollUpJob(rollupJob, states);
+
+        // check that the rollup job is started using the RollUp API
+        final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob);
+        Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest));
+        Map job = getJob(getRollupJobResponse, rollupJob);
+        if (job != null) {
+            assertThat(ObjectPath.eval("status.job_state", job), isOneOf(states));
+        }
+
+        // check that the rollup job is started using the Tasks API
+        final Request taskRequest = new Request("GET", "_tasks");
+        taskRequest.addParameter("detailed", "true");
+        taskRequest.addParameter("actions", "xpack/rollup/*");
+        Map taskResponse = toMap(client().performRequest(taskRequest));
+        Map taskResponseNodes = (Map) taskResponse.get("nodes");
+        Map taskResponseNode = (Map) taskResponseNodes.values().iterator().next();
+        Map taskResponseTasks = (Map) taskResponseNode.get("tasks");
+        Map taskResponseStatus = (Map) taskResponseTasks.values().iterator().next();
+        assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), isOneOf(states));
+
+        // check that the rollup job is started using the Cluster State API
+        final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata");
+        Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest));
+        List> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse);
+
+        boolean hasRollupTask = false;
+        for (Map task : rollupJobTasks) {
+            if (ObjectPath.eval("id", task).equals(rollupJob)) {
+                hasRollupTask = true;
+
+                final String jobStateField = "task.xpack/rollup/job.state.job_state";
+                assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"),
+                    ObjectPath.eval(jobStateField, task), isOneOf(states));
+                break;
+            }
+        }
+        if (hasRollupTask == false) {
+            fail("Expected persistent task for [" + rollupJob + "] but none found.");
+        }
+
+    }
+
+    private void waitForRollUpJob(final String rollupJob,String[] expectedStates) throws Exception {
+        assertBusy(() -> {
+            final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob);
+            Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
+            assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+
+            Map job = getJob(getRollupJobResponse, rollupJob);
+            if (job != null) {
+                assertThat(ObjectPath.eval("status.job_state", job), isOneOf(expectedStates));
+            }
+        }, 30L, TimeUnit.SECONDS);
+    }
+
+    private Map getJob(Response response, String targetJobId) throws IOException {
+        return getJob(ESRestTestCase.entityAsMap(response), targetJobId);
+    }
+
+    @SuppressWarnings("unchecked")
+    private Map getJob(Map jobsMap, String targetJobId) throws IOException {
+
+        List> jobs =
+            (List>) XContentMapValues.extractValue("jobs", jobsMap);
+
+        if (jobs == null) {
+            return null;
+        }
+
+        for (Map job : jobs) {
+            String jobId = (String) ((Map) job.get("config")).get("id");
+            if (jobId.equals(targetJobId)) {
+                return job;
+            }
+        }
+        return null;
+    }
+
+    private void waitForPendingTasks() throws Exception {
+        ESTestCase.assertBusy(() -> {
+            try {
+                Request request = new Request("GET", "/_cat/tasks");
+                request.addParameter("detailed", "true");
+                Response response = adminClient().performRequest(request);
+                if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
+                    try (BufferedReader responseReader = new BufferedReader(
+                        new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
+                        int activeTasks = 0;
+                        String line;
+                        StringBuilder tasksListString = new StringBuilder();
+                        while ((line = responseReader.readLine()) != null) {
+
+                            // We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks
+                            if (line.startsWith(RollupJob.NAME) == true) {
+                                activeTasks++;
+                                tasksListString.append(line);
+                                tasksListString.append('\n');
+                            }
+                        }
+                        assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks);
+                    }
+                }
+            } catch (IOException e) {
+                throw new AssertionError("Error getting active tasks list", e);
+            }
+        });
+    }
+
+    @SuppressWarnings("unchecked")
+    private void deleteAllJobs() throws Exception {
+        Request request = new Request("GET", "/_xpack/rollup/job/_all");
+        Response response = adminClient().performRequest(request);
+        Map jobs = ESRestTestCase.entityAsMap(response);
+        @SuppressWarnings("unchecked")
+        List> jobConfigs =
+            (List>) XContentMapValues.extractValue("jobs", jobs);
+
+        if (jobConfigs == null) {
+            return;
+        }
+
+        for (Map jobConfig : jobConfigs) {
+            logger.debug(jobConfig);
+            String jobId = (String) ((Map) jobConfig.get("config")).get("id");
+            logger.debug("Deleting job " + jobId);
+            try {
+                request = new Request("DELETE", "/_xpack/rollup/job/" + jobId);
+                adminClient().performRequest(request);
+            } catch (Exception e) {
+                // ok
+            }
+        }
+    }
+
+    private static String responseEntityToString(Response response) throws Exception {
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
+            return reader.lines().collect(Collectors.joining("\n"));
+        }
+    }
+}

From b1479bbed889787ee9febb3f6b3d3414ba071754 Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Mon, 16 Jul 2018 18:43:00 +0200
Subject: [PATCH 037/260] Scripting: Remove dead code from painless module
 (#32064)

---
 .../elasticsearch/painless/MethodWriter.java  |  8 ---
 .../painless/antlr/StashingTokenFactory.java  | 62 -------------------
 2 files changed, 70 deletions(-)
 delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
index e0a780d4188..5db7c6b3f71 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
@@ -227,14 +227,6 @@ public final class MethodWriter extends GeneratorAdapter {
         return Type.getType(clazz);
     }
 
-    public void writeBranch(final Label tru, final Label fals) {
-        if (tru != null) {
-            visitJumpInsn(Opcodes.IFNE, tru);
-        } else if (fals != null) {
-            visitJumpInsn(Opcodes.IFEQ, fals);
-        }
-    }
-
     /** Starts a new string concat.
      * @return the size of arguments pushed to stack (the object that does string concats, e.g. a StringBuilder)
      */
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java
deleted file mode 100644
index 3ac45705d55..00000000000
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.painless.antlr;
-
-import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Lexer;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenFactory;
-import org.antlr.v4.runtime.TokenSource;
-import org.antlr.v4.runtime.misc.Pair;
-
-/**
- * Token factory that preserves that last non-whitespace token so you can do token level lookbehind in the lexer.
- */
-public class StashingTokenFactory implements TokenFactory {
-    private final TokenFactory delegate;
-
-    private T lastToken;
-
-    public StashingTokenFactory(TokenFactory delegate) {
-        this.delegate = delegate;
-    }
-
-    public T getLastToken() {
-        return lastToken;
-    }
-
-    @Override
-    public T create(Pair source, int type, String text, int channel, int start, int stop, int line,
-            int charPositionInLine) {
-        return maybeStash(delegate.create(source, type, text, channel, start, stop, line, charPositionInLine));
-    }
-
-    @Override
-    public T create(int type, String text) {
-        return maybeStash(delegate.create(type, text));
-    }
-
-    private T maybeStash(T token) {
-        if (token.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL) {
-            lastToken = token;
-        }
-        return token;
-    }
-}

From 2a1a28f19c63a268330b3fe351dda731ee0f6627 Mon Sep 17 00:00:00 2001
From: Jack Conradson 
Date: Mon, 16 Jul 2018 11:15:29 -0700
Subject: [PATCH 038/260] Painless: Separate PainlessLookup into PainlessLookup
 and PainlessLookupBuilder (#32054)

---
 .../painless/AnalyzerCaster.java              |   2 +-
 .../elasticsearch/painless/MethodWriter.java  |   2 +-
 .../painless/PainlessScriptEngine.java        |   8 +-
 .../painless/lookup/PainlessLookup.java       | 692 +---------------
 .../lookup/PainlessLookupBuilder.java         | 774 ++++++++++++++++++
 .../elasticsearch/painless/lookup/def.java    |  28 +
 .../painless/node/EAssignment.java            |   2 +-
 .../elasticsearch/painless/node/EBinary.java  |   2 +-
 .../painless/node/ECapturingFunctionRef.java  |   2 +-
 .../elasticsearch/painless/node/EComp.java    |   2 +-
 .../elasticsearch/painless/node/ELambda.java  |   2 +-
 .../painless/node/EListInit.java              |   2 +-
 .../elasticsearch/painless/node/EMapInit.java |   2 +-
 .../elasticsearch/painless/node/EUnary.java   |   2 +-
 .../elasticsearch/painless/node/PBrace.java   |   2 +-
 .../painless/node/PCallInvoke.java            |   2 +-
 .../elasticsearch/painless/node/PField.java   |   2 +-
 .../painless/node/PSubDefArray.java           |   2 +-
 .../painless/node/PSubDefCall.java            |   2 +-
 .../painless/node/PSubDefField.java           |   2 +-
 .../elasticsearch/painless/node/SEach.java    |   2 +-
 .../painless/node/SSubEachIterable.java       |   2 +-
 .../painless/BaseClassTests.java              |   3 +-
 .../elasticsearch/painless/DebugTests.java    |   3 +-
 .../org/elasticsearch/painless/Debugger.java  |   4 +-
 .../painless/DefBootstrapTests.java           |   3 +-
 .../painless/PainlessDocGenerator.java        |  14 +-
 .../painless/ScriptTestCase.java              |   3 +-
 .../painless/node/NodeToStringTests.java      |   3 +-
 29 files changed, 848 insertions(+), 723 deletions(-)
 create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
 create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
index 69ef57faad6..457ec82a5e4 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
@@ -21,7 +21,7 @@ package org.elasticsearch.painless;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessCast;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 
 import java.util.Objects;
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
index 5db7c6b3f71..c339e7bfb26 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless;
 
 import org.elasticsearch.painless.lookup.PainlessCast;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.Label;
 import org.objectweb.asm.Opcodes;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
index 4560fd85a65..1687cb24cb6 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
@@ -24,7 +24,7 @@ import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.painless.Compiler.Loader;
-import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptContext;
@@ -102,9 +102,11 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
         for (Map.Entry, List> entry : contexts.entrySet()) {
             ScriptContext context = entry.getKey();
             if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) {
-                contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, new PainlessLookup(entry.getValue())));
+                contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class,
+                    new PainlessLookupBuilder(entry.getValue()).build()));
             } else {
-                contextsToCompilers.put(context, new Compiler(context.instanceClazz, new PainlessLookup(entry.getValue())));
+                contextsToCompilers.put(context, new Compiler(context.instanceClazz,
+                    new PainlessLookupBuilder(entry.getValue()).build()));
             }
         }
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
index 5833767fbd3..feeaf4d34bc 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
@@ -19,25 +19,10 @@
 
 package org.elasticsearch.painless.lookup;
 
-import org.elasticsearch.painless.spi.Whitelist;
-import org.elasticsearch.painless.spi.WhitelistClass;
-import org.elasticsearch.painless.spi.WhitelistConstructor;
-import org.elasticsearch.painless.spi.WhitelistField;
-import org.elasticsearch.painless.spi.WhitelistMethod;
-import org.objectweb.asm.Type;
-
-import java.lang.invoke.MethodHandle;
-import java.lang.invoke.MethodHandles;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
-import java.util.Stack;
-import java.util.regex.Pattern;
 
 /**
  * The entire API for Painless.  Also used as a whitelist for checking for legal
@@ -45,18 +30,6 @@ import java.util.regex.Pattern;
  */
 public final class PainlessLookup {
 
-    private static final Map methodCache = new HashMap<>();
-    private static final Map fieldCache = new HashMap<>();
-
-    private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
-
-    /** Marker class for def type to be used during type analysis. */
-    public static final class def {
-        private def() {
-
-        }
-    }
-
     public static Class getBoxedType(Class clazz) {
         if (clazz == boolean.class) {
             return Boolean.class;
@@ -205,22 +178,6 @@ public final class PainlessLookup {
         return clazz.getCanonicalName().replace('$', '.');
     }
 
-    private static String buildMethodCacheKey(String structName, String methodName, List> arguments) {
-        StringBuilder key = new StringBuilder();
-        key.append(structName);
-        key.append(methodName);
-
-        for (Class argument : arguments) {
-            key.append(argument.getName());
-        }
-
-        return key.toString();
-    }
-
-    private static String buildFieldCacheKey(String structName, String fieldName, String typeName) {
-        return structName + fieldName + typeName;
-    }
-
     public Collection getStructs() {
         return javaClassesToPainlessStructs.values();
     }
@@ -228,652 +185,9 @@ public final class PainlessLookup {
     private final Map> painlessTypesToJavaClasses;
     private final Map, PainlessClass> javaClassesToPainlessStructs;
 
-    public PainlessLookup(List whitelists) {
-        painlessTypesToJavaClasses = new HashMap<>();
-        javaClassesToPainlessStructs = new HashMap<>();
-
-        String origin = null;
-
-        painlessTypesToJavaClasses.put("def", def.class);
-        javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class)));
-
-        try {
-            // first iteration collects all the Painless type names that
-            // are used for validation during the second iteration
-            for (Whitelist whitelist : whitelists) {
-                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
-                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
-                    PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
-
-                    if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
-                        throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
-                            "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
-                    }
-
-                    origin = whitelistStruct.origin;
-                    addStruct(whitelist.javaClassLoader, whitelistStruct);
-
-                    painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
-                    javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct);
-                }
-            }
-
-            // second iteration adds all the constructors, methods, and fields that will
-            // be available in Painless along with validating they exist and all their types have
-            // been white-listed during the first iteration
-            for (Whitelist whitelist : whitelists) {
-                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
-                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
-
-                    for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
-                        origin = whitelistConstructor.origin;
-                        addConstructor(painlessTypeName, whitelistConstructor);
-                    }
-
-                    for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
-                        origin = whitelistMethod.origin;
-                        addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
-                    }
-
-                    for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
-                        origin = whitelistField.origin;
-                        addField(painlessTypeName, whitelistField);
-                    }
-                }
-            }
-        } catch (Exception exception) {
-            throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
-        }
-
-        // goes through each Painless struct and determines the inheritance list,
-        // and then adds all inherited types to the Painless struct's whitelist
-        for (Class javaClass : javaClassesToPainlessStructs.keySet()) {
-            PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass);
-
-            List painlessSuperStructs = new ArrayList<>();
-            Class javaSuperClass = painlessStruct.clazz.getSuperclass();
-
-            Stack> javaInteraceLookups = new Stack<>();
-            javaInteraceLookups.push(painlessStruct.clazz);
-
-            // adds super classes to the inheritance list
-            if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
-                while (javaSuperClass != null) {
-                    PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass);
-
-                    if (painlessSuperStruct != null) {
-                        painlessSuperStructs.add(painlessSuperStruct.name);
-                    }
-
-                    javaInteraceLookups.push(javaSuperClass);
-                    javaSuperClass = javaSuperClass.getSuperclass();
-                }
-            }
-
-            // adds all super interfaces to the inheritance list
-            while (javaInteraceLookups.isEmpty() == false) {
-                Class javaInterfaceLookup = javaInteraceLookups.pop();
-
-                for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
-                    PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface);
-
-                    if (painlessInterfaceStruct != null) {
-                        String painlessInterfaceStructName = painlessInterfaceStruct.name;
-
-                        if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
-                            painlessSuperStructs.add(painlessInterfaceStructName);
-                        }
-
-                        for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) {
-                            javaInteraceLookups.push(javaPushInterface);
-                        }
-                    }
-                }
-            }
-
-            // copies methods and fields from super structs to the parent struct
-            copyStruct(painlessStruct.name, painlessSuperStructs);
-
-            // copies methods and fields from Object into interface types
-            if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
-                PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class);
-
-                if (painlessObjectStruct != null) {
-                    copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
-                }
-            }
-        }
-
-        // precompute runtime classes
-        for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) {
-            addRuntimeClass(painlessStruct);
-        }
-
-        // copy all structs to make them unmodifiable for outside users:
-        for (Map.Entry,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) {
-            entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue())));
-        }
-    }
-
-    private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) {
-        String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
-        String importedPainlessTypeName = painlessTypeName;
-
-        if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) {
-            throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]");
-        }
-
-        int index = whitelistStruct.javaClassName.lastIndexOf('.');
-
-        if (index != -1) {
-            importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.');
-        }
-
-        Class javaClass;
-
-        if      ("void".equals(whitelistStruct.javaClassName))    javaClass = void.class;
-        else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class;
-        else if ("byte".equals(whitelistStruct.javaClassName))    javaClass = byte.class;
-        else if ("short".equals(whitelistStruct.javaClassName))   javaClass = short.class;
-        else if ("char".equals(whitelistStruct.javaClassName))    javaClass = char.class;
-        else if ("int".equals(whitelistStruct.javaClassName))     javaClass = int.class;
-        else if ("long".equals(whitelistStruct.javaClassName))    javaClass = long.class;
-        else if ("float".equals(whitelistStruct.javaClassName))   javaClass = float.class;
-        else if ("double".equals(whitelistStruct.javaClassName))  javaClass = double.class;
-        else {
-            try {
-                javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader);
-            } catch (ClassNotFoundException cnfe) {
-                throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" +
-                        " for struct [" + painlessTypeName + "]");
-            }
-        }
-
-        PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass);
-
-        if (existingStruct == null) {
-            PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass));
-            painlessTypesToJavaClasses.put(painlessTypeName, javaClass);
-            javaClassesToPainlessStructs.put(javaClass, struct);
-        } else if (existingStruct.clazz.equals(javaClass) == false) {
-            throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " +
-                    "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " +
-                    "[" + existingStruct.clazz.getName() + "]");
-        }
-
-        if (painlessTypeName.equals(importedPainlessTypeName)) {
-            if (whitelistStruct.onlyFQNJavaClassName == false) {
-                throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package");
-            }
-        } else {
-            Class importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName);
-
-            if (importedJavaClass == null) {
-                if (whitelistStruct.onlyFQNJavaClassName == false) {
-                    if (existingStruct != null) {
-                        throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
-                    }
-
-                    painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass);
-                }
-            } else if (importedJavaClass.equals(javaClass) == false) {
-                throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " +
-                    "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " +
-                    "and [" + importedJavaClass.getName() + "]");
-            } else if (whitelistStruct.onlyFQNJavaClassName) {
-                throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
-            }
-        }
-    }
-
-    private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
-        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
-
-        if (ownerStruct == null) {
-            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
-                    "parameters " + whitelistConstructor.painlessParameterTypeNames);
-        }
-
-        List> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
-        Class[] javaClassParameters = new Class[whitelistConstructor.painlessParameterTypeNames.size()];
-
-        for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
-            String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);
-
-            try {
-                Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
-
-                painlessParametersTypes.add(painlessParameterClass);
-                javaClassParameters[parameterCount] = defClassToObjectClass(painlessParameterClass);
-            } catch (IllegalArgumentException iae) {
-                throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
-                        "with owner struct [" + ownerStructName + "] and constructor parameters " +
-                        whitelistConstructor.painlessParameterTypeNames, iae);
-            }
-        }
-
-        java.lang.reflect.Constructor javaConstructor;
-
-        try {
-            javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
-        } catch (NoSuchMethodException exception) {
-            throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
-                    " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
-        }
-
-        PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size());
-        PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);
-
-        if (painlessConstructor == null) {
-            org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
-            MethodHandle javaHandle;
-
-            try {
-                javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
-            } catch (IllegalAccessException exception) {
-                throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
-                        " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames);
-            }
-
-            painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes),
-                    key -> new PainlessMethod("", ownerStruct, null, void.class, painlessParametersTypes,
-                            asmConstructor, javaConstructor.getModifiers(), javaHandle));
-            ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
-        } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){
-            throw new IllegalArgumentException(
-                    "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
-                    "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
-        }
-    }
-
-    private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
-        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
-
-        if (ownerStruct == null) {
-            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
-                    "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
-        }
-
-        if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
-            throw new IllegalArgumentException("invalid method name" +
-                    " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
-        }
-
-        Class javaAugmentedClass;
-
-        if (whitelistMethod.javaAugmentedClassName != null) {
-            try {
-                javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
-            } catch (ClassNotFoundException cnfe) {
-                throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
-                        "not found for method with name [" + whitelistMethod.javaMethodName + "] " +
-                        "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
-            }
-        } else {
-            javaAugmentedClass = null;
-        }
-
-        int augmentedOffset = javaAugmentedClass == null ? 0 : 1;
-
-        List> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
-        Class[] javaClassParameters = new Class[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];
-
-        if (javaAugmentedClass != null) {
-            javaClassParameters[0] = ownerStruct.clazz;
-        }
-
-        for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
-            String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);
-
-            try {
-                Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
-
-                painlessParametersTypes.add(painlessParameterClass);
-                javaClassParameters[parameterCount + augmentedOffset] = defClassToObjectClass(painlessParameterClass);
-            } catch (IllegalArgumentException iae) {
-                throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
-                        "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
-                        "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
-            }
-        }
-
-        Class javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
-        java.lang.reflect.Method javaMethod;
-
-        try {
-            javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
-        } catch (NoSuchMethodException nsme) {
-            throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
-                    "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
-                    javaImplClass.getName() + "]", nsme);
-        }
-
-        Class painlessReturnClass;
-
-        try {
-            painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName);
-        } catch (IllegalArgumentException iae) {
-            throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
-                    "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
-                    "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
-        }
-
-        if (javaMethod.getReturnType() != defClassToObjectClass(painlessReturnClass)) {
-            throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
-                    "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
-                    "method with name [" + whitelistMethod.javaMethodName + "] " +
-                    "and parameters " + whitelistMethod.painlessParameterTypeNames);
-        }
-
-        PainlessMethodKey painlessMethodKey =
-                new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());
-
-        if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
-            PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);
-
-            if (painlessMethod == null) {
-                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
-                MethodHandle javaMethodHandle;
-
-                try {
-                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
-                } catch (IllegalAccessException exception) {
-                    throw new IllegalArgumentException("method handle not found for method with name " +
-                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
-                }
-
-                painlessMethod = methodCache.computeIfAbsent(
-                        buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
-                        key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass,
-                                painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
-                ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
-            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
-                    painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
-                throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
-                        "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
-                        "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
-                        "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
-            }
-        } else {
-            PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);
-
-            if (painlessMethod == null) {
-                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
-                MethodHandle javaMethodHandle;
-
-                try {
-                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
-                } catch (IllegalAccessException exception) {
-                    throw new IllegalArgumentException("method handle not found for method with name " +
-                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
-                }
-
-                painlessMethod = methodCache.computeIfAbsent(
-                        buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
-                        key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass,
-                                painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
-                ownerStruct.methods.put(painlessMethodKey, painlessMethod);
-            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
-                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
-                throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
-                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
-                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
-                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
-            }
-        }
-    }
-
-    private void addField(String ownerStructName, WhitelistField whitelistField) {
-        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
-
-        if (ownerStruct == null) {
-            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
-                    "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
-        }
-
-        if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
-            throw new IllegalArgumentException("invalid field name " +
-                    "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "].");
-        }
-
-        java.lang.reflect.Field javaField;
-
-        try {
-            javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
-        } catch (NoSuchFieldException exception) {
-            throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
-                    "not found for class [" + ownerStruct.clazz.getName() + "].");
-        }
-
-        Class painlessFieldClass;
-
-        try {
-            painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName);
-        } catch (IllegalArgumentException iae) {
-            throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " +
-                "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
-        }
-
-        if (Modifier.isStatic(javaField.getModifiers())) {
-            if (Modifier.isFinal(javaField.getModifiers()) == false) {
-                throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
-                        "with owner struct [" + ownerStruct.name + "] is not final");
-            }
-
-            PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);
-
-            if (painlessField == null) {
-                painlessField = fieldCache.computeIfAbsent(
-                        buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
-                        key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
-                                ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null));
-                ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
-            } else if (painlessField.clazz != painlessFieldClass) {
-                throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
-                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
-            }
-        } else {
-            MethodHandle javaMethodHandleGetter;
-            MethodHandle javaMethodHandleSetter;
-
-            try {
-                if (Modifier.isStatic(javaField.getModifiers()) == false) {
-                    javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
-                    javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
-                } else {
-                    javaMethodHandleGetter = null;
-                    javaMethodHandleSetter = null;
-                }
-            } catch (IllegalAccessException exception) {
-                throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
-                    " not found for class [" + ownerStruct.clazz.getName() + "].");
-            }
-
-            PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);
-
-            if (painlessField == null) {
-                painlessField = fieldCache.computeIfAbsent(
-                        buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
-                        key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
-                                ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));
-                ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
-            } else if (painlessField.clazz != painlessFieldClass) {
-                throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
-                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
-            }
-        }
-    }
-
-    private void copyStruct(String struct, List children) {
-        final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct));
-
-        if (owner == null) {
-            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
-        }
-
-        for (int count = 0; count < children.size(); ++count) {
-            final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count)));
-
-            if (child == null) {
-                throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
-                    " not defined for copy to owner struct [" + owner.name + "].");
-            }
-
-            if (!child.clazz.isAssignableFrom(owner.clazz)) {
-                throw new ClassCastException("Child struct [" + child.name + "]" +
-                    " is not a super type of owner struct [" + owner.name + "] in copy.");
-            }
-
-            for (Map.Entry kvPair : child.methods.entrySet()) {
-                PainlessMethodKey methodKey = kvPair.getKey();
-                PainlessMethod method = kvPair.getValue();
-                if (owner.methods.get(methodKey) == null) {
-                    // TODO: some of these are no longer valid or outright don't work
-                    // TODO: since classes may not come from the Painless classloader
-                    // TODO: and it was dependent on the order of the extends which
-                    // TODO: which no longer exists since this is generated automatically
-                    // sanity check, look for missing covariant/generic override
-                    /*if (owner.clazz.isInterface() && child.clazz == Object.class) {
-                        // ok
-                    } else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
-                        // ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!)
-                    } else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
-                        // ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
-                        // https://bugs.openjdk.java.net/browse/JDK-8072746
-                    } else {
-                        try {
-                            // TODO: we *have* to remove all these public members and use getter methods to encapsulate!
-                            final Class impl;
-                            final Class arguments[];
-                            if (method.augmentation != null) {
-                                impl = method.augmentation;
-                                arguments = new Class[method.arguments.size() + 1];
-                                arguments[0] = method.owner.clazz;
-                                for (int i = 0; i < method.arguments.size(); i++) {
-                                    arguments[i + 1] = method.arguments.get(i).clazz;
-                                }
-                            } else {
-                                impl = owner.clazz;
-                                arguments = new Class[method.arguments.size()];
-                                for (int i = 0; i < method.arguments.size(); i++) {
-                                    arguments[i] = method.arguments.get(i).clazz;
-                                }
-                            }
-                            java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
-                            if (m.getReturnType() != method.rtn.clazz) {
-                                throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
-                            }
-                            if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
-                                // its a bridge in the destination, but not in the source, but it might still be ok, check generics:
-                                java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
-                                if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
-                                    throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
-                                }
-                            }
-                        } catch (ReflectiveOperationException e) {
-                            throw new AssertionError(e);
-                        }
-                    }*/
-                    owner.methods.put(methodKey, method);
-                }
-            }
-
-            for (PainlessField field : child.members.values()) {
-                if (owner.members.get(field.name) == null) {
-                    owner.members.put(field.name,
-                        new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter));
-                }
-            }
-        }
-    }
-
-    /**
-     * Precomputes a more efficient structure for dynamic method/field access.
-     */
-    private void addRuntimeClass(final PainlessClass struct) {
-        // add all getters/setters
-        for (Map.Entry method : struct.methods.entrySet()) {
-            String name = method.getKey().name;
-            PainlessMethod m = method.getValue();
-
-            if (m.arguments.size() == 0 &&
-                name.startsWith("get") &&
-                name.length() > 3 &&
-                Character.isUpperCase(name.charAt(3))) {
-                StringBuilder newName = new StringBuilder();
-                newName.append(Character.toLowerCase(name.charAt(3)));
-                newName.append(name.substring(4));
-                struct.getters.putIfAbsent(newName.toString(), m.handle);
-            } else if (m.arguments.size() == 0 &&
-                name.startsWith("is") &&
-                name.length() > 2 &&
-                Character.isUpperCase(name.charAt(2))) {
-                StringBuilder newName = new StringBuilder();
-                newName.append(Character.toLowerCase(name.charAt(2)));
-                newName.append(name.substring(3));
-                struct.getters.putIfAbsent(newName.toString(), m.handle);
-            }
-
-            if (m.arguments.size() == 1 &&
-                name.startsWith("set") &&
-                name.length() > 3 &&
-                Character.isUpperCase(name.charAt(3))) {
-                StringBuilder newName = new StringBuilder();
-                newName.append(Character.toLowerCase(name.charAt(3)));
-                newName.append(name.substring(4));
-                struct.setters.putIfAbsent(newName.toString(), m.handle);
-            }
-        }
-
-        // add all members
-        for (Map.Entry member : struct.members.entrySet()) {
-            struct.getters.put(member.getKey(), member.getValue().getter);
-            struct.setters.put(member.getKey(), member.getValue().setter);
-        }
-    }
-
-    /** computes the functional interface method for a class, or returns null */
-    private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) {
-        if (!clazz.clazz.isInterface()) {
-            return null;
-        }
-        // if its marked with this annotation, we fail if the conditions don't hold (means whitelist bug)
-        // otherwise, this annotation is pretty useless.
-        boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);
-        List methods = new ArrayList<>();
-        for (java.lang.reflect.Method m : clazz.clazz.getMethods()) {
-            // default interface methods don't count
-            if (m.isDefault()) {
-                continue;
-            }
-            // static methods don't count
-            if (Modifier.isStatic(m.getModifiers())) {
-                continue;
-            }
-            // if its from Object, it doesn't count
-            try {
-                Object.class.getMethod(m.getName(), m.getParameterTypes());
-                continue;
-            } catch (ReflectiveOperationException e) {
-                // it counts
-            }
-            methods.add(m);
-        }
-        if (methods.size() != 1) {
-            if (hasAnnotation) {
-                throw new IllegalArgumentException("Class: " + clazz.name +
-                    " is marked with FunctionalInterface but doesn't fit the bill: " + methods);
-            }
-            return null;
-        }
-        // inspect the one method found from the reflection API, it should match the whitelist!
-        java.lang.reflect.Method oneMethod = methods.get(0);
-        PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));
-        if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
-            throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
-                "method is not whitelisted!");
-        }
-        return painless;
+    PainlessLookup(Map> painlessTypesToJavaClasses, Map, PainlessClass> javaClassesToPainlessStructs) {
+        this.painlessTypesToJavaClasses = Collections.unmodifiableMap(painlessTypesToJavaClasses);
+        this.javaClassesToPainlessStructs = Collections.unmodifiableMap(javaClassesToPainlessStructs);
     }
 
     public boolean isSimplePainlessType(String painlessType) {
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
new file mode 100644
index 00000000000..1dadce318d6
--- /dev/null
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
@@ -0,0 +1,774 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless.lookup;
+
+import org.elasticsearch.painless.spi.Whitelist;
+import org.elasticsearch.painless.spi.WhitelistClass;
+import org.elasticsearch.painless.spi.WhitelistConstructor;
+import org.elasticsearch.painless.spi.WhitelistField;
+import org.elasticsearch.painless.spi.WhitelistMethod;
+import org.objectweb.asm.Type;
+
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Stack;
+import java.util.regex.Pattern;
+
+public class PainlessLookupBuilder {
+    private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
+
+    private static final Map methodCache = new HashMap<>();
+    private static final Map fieldCache = new HashMap<>();
+
+    private static String buildMethodCacheKey(String structName, String methodName, List> arguments) {
+        StringBuilder key = new StringBuilder();
+        key.append(structName);
+        key.append(methodName);
+
+        for (Class argument : arguments) {
+            key.append(argument.getName());
+        }
+
+        return key.toString();
+    }
+
+    private static String buildFieldCacheKey(String structName, String fieldName, String typeName) {
+        return structName + fieldName + typeName;
+    }
+
+    private final Map> painlessTypesToJavaClasses;
+    private final Map, PainlessClass> javaClassesToPainlessStructs;
+
+    public PainlessLookupBuilder(List whitelists) {
+        painlessTypesToJavaClasses = new HashMap<>();
+        javaClassesToPainlessStructs = new HashMap<>();
+
+        String origin = null;
+
+        painlessTypesToJavaClasses.put("def", def.class);
+        javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class)));
+
+        try {
+            // first iteration collects all the Painless type names that
+            // are used for validation during the second iteration
+            for (Whitelist whitelist : whitelists) {
+                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
+                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
+                    PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
+
+                    if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
+                        throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
+                            "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
+                    }
+
+                    origin = whitelistStruct.origin;
+                    addStruct(whitelist.javaClassLoader, whitelistStruct);
+
+                    painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
+                    javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct);
+                }
+            }
+
+            // second iteration adds all the constructors, methods, and fields that will
+            // be available in Painless along with validating they exist and all their types have
+            // been white-listed during the first iteration
+            for (Whitelist whitelist : whitelists) {
+                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
+                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
+
+                    for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
+                        origin = whitelistConstructor.origin;
+                        addConstructor(painlessTypeName, whitelistConstructor);
+                    }
+
+                    for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
+                        origin = whitelistMethod.origin;
+                        addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
+                    }
+
+                    for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
+                        origin = whitelistField.origin;
+                        addField(painlessTypeName, whitelistField);
+                    }
+                }
+            }
+        } catch (Exception exception) {
+            throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
+        }
+
+        // goes through each Painless struct and determines the inheritance list,
+        // and then adds all inherited types to the Painless struct's whitelist
+        for (Class javaClass : javaClassesToPainlessStructs.keySet()) {
+            PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass);
+
+            List painlessSuperStructs = new ArrayList<>();
+            Class javaSuperClass = painlessStruct.clazz.getSuperclass();
+
+            Stack> javaInteraceLookups = new Stack<>();
+            javaInteraceLookups.push(painlessStruct.clazz);
+
+            // adds super classes to the inheritance list
+            if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
+                while (javaSuperClass != null) {
+                    PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass);
+
+                    if (painlessSuperStruct != null) {
+                        painlessSuperStructs.add(painlessSuperStruct.name);
+                    }
+
+                    javaInteraceLookups.push(javaSuperClass);
+                    javaSuperClass = javaSuperClass.getSuperclass();
+                }
+            }
+
+            // adds all super interfaces to the inheritance list
+            while (javaInteraceLookups.isEmpty() == false) {
+                Class javaInterfaceLookup = javaInteraceLookups.pop();
+
+                for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
+                    PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface);
+
+                    if (painlessInterfaceStruct != null) {
+                        String painlessInterfaceStructName = painlessInterfaceStruct.name;
+
+                        if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
+                            painlessSuperStructs.add(painlessInterfaceStructName);
+                        }
+
+                        for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) {
+                            javaInteraceLookups.push(javaPushInterface);
+                        }
+                    }
+                }
+            }
+
+            // copies methods and fields from super structs to the parent struct
+            copyStruct(painlessStruct.name, painlessSuperStructs);
+
+            // copies methods and fields from Object into interface types
+            if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
+                PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class);
+
+                if (painlessObjectStruct != null) {
+                    copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
+                }
+            }
+        }
+
+        // precompute runtime classes
+        for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) {
+            addRuntimeClass(painlessStruct);
+        }
+
+        // copy all structs to make them unmodifiable for outside users:
+        for (Map.Entry,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) {
+            entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue())));
+        }
+    }
+
+    private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) {
+        String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
+        String importedPainlessTypeName = painlessTypeName;
+
+        if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) {
+            throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]");
+        }
+
+        int index = whitelistStruct.javaClassName.lastIndexOf('.');
+
+        if (index != -1) {
+            importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.');
+        }
+
+        Class javaClass;
+
+        if      ("void".equals(whitelistStruct.javaClassName))    javaClass = void.class;
+        else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class;
+        else if ("byte".equals(whitelistStruct.javaClassName))    javaClass = byte.class;
+        else if ("short".equals(whitelistStruct.javaClassName))   javaClass = short.class;
+        else if ("char".equals(whitelistStruct.javaClassName))    javaClass = char.class;
+        else if ("int".equals(whitelistStruct.javaClassName))     javaClass = int.class;
+        else if ("long".equals(whitelistStruct.javaClassName))    javaClass = long.class;
+        else if ("float".equals(whitelistStruct.javaClassName))   javaClass = float.class;
+        else if ("double".equals(whitelistStruct.javaClassName))  javaClass = double.class;
+        else {
+            try {
+                javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" +
+                    " for struct [" + painlessTypeName + "]");
+            }
+        }
+
+        PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass);
+
+        if (existingStruct == null) {
+            PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass));
+            painlessTypesToJavaClasses.put(painlessTypeName, javaClass);
+            javaClassesToPainlessStructs.put(javaClass, struct);
+        } else if (existingStruct.clazz.equals(javaClass) == false) {
+            throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " +
+                "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " +
+                "[" + existingStruct.clazz.getName() + "]");
+        }
+
+        if (painlessTypeName.equals(importedPainlessTypeName)) {
+            if (whitelistStruct.onlyFQNJavaClassName == false) {
+                throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package");
+            }
+        } else {
+            Class importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName);
+
+            if (importedJavaClass == null) {
+                if (whitelistStruct.onlyFQNJavaClassName == false) {
+                    if (existingStruct != null) {
+                        throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
+                    }
+
+                    painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass);
+                }
+            } else if (importedJavaClass.equals(javaClass) == false) {
+                throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " +
+                    "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " +
+                    "and [" + importedJavaClass.getName() + "]");
+            } else if (whitelistStruct.onlyFQNJavaClassName) {
+                throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
+            }
+        }
+    }
+
+    private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
+        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
+
+        if (ownerStruct == null) {
+            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
+                "parameters " + whitelistConstructor.painlessParameterTypeNames);
+        }
+
+        List> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
+        Class[] javaClassParameters = new Class[whitelistConstructor.painlessParameterTypeNames.size()];
+
+        for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
+            String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);
+
+            try {
+                Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
+
+                painlessParametersTypes.add(painlessParameterClass);
+                javaClassParameters[parameterCount] = PainlessLookup.defClassToObjectClass(painlessParameterClass);
+            } catch (IllegalArgumentException iae) {
+                throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
+                    "with owner struct [" + ownerStructName + "] and constructor parameters " +
+                    whitelistConstructor.painlessParameterTypeNames, iae);
+            }
+        }
+
+        java.lang.reflect.Constructor javaConstructor;
+
+        try {
+            javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
+        } catch (NoSuchMethodException exception) {
+            throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
+                " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
+        }
+
+        PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size());
+        PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);
+
+        if (painlessConstructor == null) {
+            org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
+            MethodHandle javaHandle;
+
+            try {
+                javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
+            } catch (IllegalAccessException exception) {
+                throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
+                    " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames);
+            }
+
+            painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes),
+                key -> new PainlessMethod("", ownerStruct, null, void.class, painlessParametersTypes,
+                    asmConstructor, javaConstructor.getModifiers(), javaHandle));
+            ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
+        } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){
+            throw new IllegalArgumentException(
+                "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
+                    "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
+        }
+    }
+
+    private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
+        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
+
+        if (ownerStruct == null) {
+            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
+                "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
+        }
+
+        if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
+            throw new IllegalArgumentException("invalid method name" +
+                " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
+        }
+
+        Class javaAugmentedClass;
+
+        if (whitelistMethod.javaAugmentedClassName != null) {
+            try {
+                javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
+                    "not found for method with name [" + whitelistMethod.javaMethodName + "] " +
+                    "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
+            }
+        } else {
+            javaAugmentedClass = null;
+        }
+
+        int augmentedOffset = javaAugmentedClass == null ? 0 : 1;
+
+        List> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
+        Class[] javaClassParameters = new Class[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];
+
+        if (javaAugmentedClass != null) {
+            javaClassParameters[0] = ownerStruct.clazz;
+        }
+
+        for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
+            String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);
+
+            try {
+                Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
+
+                painlessParametersTypes.add(painlessParameterClass);
+                javaClassParameters[parameterCount + augmentedOffset] = PainlessLookup.defClassToObjectClass(painlessParameterClass);
+            } catch (IllegalArgumentException iae) {
+                throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
+                    "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
+                    "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
+            }
+        }
+
+        Class javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
+        java.lang.reflect.Method javaMethod;
+
+        try {
+            javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
+        } catch (NoSuchMethodException nsme) {
+            throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
+                "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
+                javaImplClass.getName() + "]", nsme);
+        }
+
+        Class painlessReturnClass;
+
+        try {
+            painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName);
+        } catch (IllegalArgumentException iae) {
+            throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
+                "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
+                "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
+        }
+
+        if (javaMethod.getReturnType() != PainlessLookup.defClassToObjectClass(painlessReturnClass)) {
+            throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
+                "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
+                "method with name [" + whitelistMethod.javaMethodName + "] " +
+                "and parameters " + whitelistMethod.painlessParameterTypeNames);
+        }
+
+        PainlessMethodKey painlessMethodKey =
+            new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());
+
+        if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
+            PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);
+
+            if (painlessMethod == null) {
+                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
+                MethodHandle javaMethodHandle;
+
+                try {
+                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
+                } catch (IllegalAccessException exception) {
+                    throw new IllegalArgumentException("method handle not found for method with name " +
+                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
+                }
+
+                painlessMethod = methodCache.computeIfAbsent(
+                    buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
+                    key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass,
+                        painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
+                ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
+            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
+                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
+                throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
+                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
+                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
+            }
+        } else {
+            PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);
+
+            if (painlessMethod == null) {
+                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
+                MethodHandle javaMethodHandle;
+
+                try {
+                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
+                } catch (IllegalAccessException exception) {
+                    throw new IllegalArgumentException("method handle not found for method with name " +
+                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
+                }
+
+                painlessMethod = methodCache.computeIfAbsent(
+                    buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
+                    key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass,
+                        painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
+                ownerStruct.methods.put(painlessMethodKey, painlessMethod);
+            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
+                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
+                throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
+                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
+                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
+            }
+        }
+    }
+
+    private void addField(String ownerStructName, WhitelistField whitelistField) {
+        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
+
+        if (ownerStruct == null) {
+            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
+                "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
+        }
+
+        if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
+            throw new IllegalArgumentException("invalid field name " +
+                "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "].");
+        }
+
+        java.lang.reflect.Field javaField;
+
+        try {
+            javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
+        } catch (NoSuchFieldException exception) {
+            throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
+                "not found for class [" + ownerStruct.clazz.getName() + "].");
+        }
+
+        Class painlessFieldClass;
+
+        try {
+            painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName);
+        } catch (IllegalArgumentException iae) {
+            throw new IllegalArgumentException("struct not defined for field type [" + whitelistField.painlessFieldTypeName + "] " +
+                "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
+        }
+
+        if (Modifier.isStatic(javaField.getModifiers())) {
+            if (Modifier.isFinal(javaField.getModifiers()) == false) {
+                throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
+                    "with owner struct [" + ownerStruct.name + "] is not final");
+            }
+
+            PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);
+
+            if (painlessField == null) {
+                painlessField = fieldCache.computeIfAbsent(
+                    buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
+                    key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
+                        ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null));
+                ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
+            } else if (painlessField.clazz != painlessFieldClass) {
+                throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
+            }
+        } else {
+            MethodHandle javaMethodHandleGetter;
+            MethodHandle javaMethodHandleSetter;
+
+            try {
+                if (Modifier.isStatic(javaField.getModifiers()) == false) {
+                    javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
+                    javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
+                } else {
+                    javaMethodHandleGetter = null;
+                    javaMethodHandleSetter = null;
+                }
+            } catch (IllegalAccessException exception) {
+                throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
+                    " not found for class [" + ownerStruct.clazz.getName() + "].");
+            }
+
+            PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);
+
+            if (painlessField == null) {
+                painlessField = fieldCache.computeIfAbsent(
+                    buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
+                    key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
+                        ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));
+                ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
+            } else if (painlessField.clazz != painlessFieldClass) {
+                throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
+            }
+        }
+    }
+
+    private void copyStruct(String struct, List children) {
+        final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct));
+
+        if (owner == null) {
+            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
+        }
+
+        for (int count = 0; count < children.size(); ++count) {
+            final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count)));
+
+            if (child == null) {
+                throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
+                    " not defined for copy to owner struct [" + owner.name + "].");
+            }
+
+            if (!child.clazz.isAssignableFrom(owner.clazz)) {
+                throw new ClassCastException("Child struct [" + child.name + "]" +
+                    " is not a super type of owner struct [" + owner.name + "] in copy.");
+            }
+
+            for (Map.Entry kvPair : child.methods.entrySet()) {
+                PainlessMethodKey methodKey = kvPair.getKey();
+                PainlessMethod method = kvPair.getValue();
+                if (owner.methods.get(methodKey) == null) {
+                    // TODO: some of these are no longer valid or outright don't work
+                    // TODO: since classes may not come from the Painless classloader
+                    // TODO: and it was dependent on the order of the extends which
+                    // TODO: no longer exists since this is generated automatically
+                    // sanity check, look for missing covariant/generic override
+                    /*if (owner.clazz.isInterface() && child.clazz == Object.class) {
+                        // ok
+                    } else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
+                        // ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!)
+                    } else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
+                        // ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
+                        // https://bugs.openjdk.java.net/browse/JDK-8072746
+                    } else {
+                        try {
+                            // TODO: we *have* to remove all these public members and use getter methods to encapsulate!
+                            final Class impl;
+                            final Class arguments[];
+                            if (method.augmentation != null) {
+                                impl = method.augmentation;
+                                arguments = new Class[method.arguments.size() + 1];
+                                arguments[0] = method.owner.clazz;
+                                for (int i = 0; i < method.arguments.size(); i++) {
+                                    arguments[i + 1] = method.arguments.get(i).clazz;
+                                }
+                            } else {
+                                impl = owner.clazz;
+                                arguments = new Class[method.arguments.size()];
+                                for (int i = 0; i < method.arguments.size(); i++) {
+                                    arguments[i] = method.arguments.get(i).clazz;
+                                }
+                            }
+                            java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
+                            if (m.getReturnType() != method.rtn.clazz) {
+                                throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
+                            }
+                            if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
+                                // its a bridge in the destination, but not in the source, but it might still be ok, check generics:
+                                java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
+                                if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
+                                    throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
+                                }
+                            }
+                        } catch (ReflectiveOperationException e) {
+                            throw new AssertionError(e);
+                        }
+                    }*/
+                    owner.methods.put(methodKey, method);
+                }
+            }
+
+            for (PainlessField field : child.members.values()) {
+                if (owner.members.get(field.name) == null) {
+                    owner.members.put(field.name,
+                        new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter));
+                }
+            }
+        }
+    }
+
+    /**
+     * Precomputes a more efficient structure for dynamic method/field access.
+     */
+    private void addRuntimeClass(final PainlessClass struct) {
+        // add all getters/setters
+        for (Map.Entry method : struct.methods.entrySet()) {
+            String name = method.getKey().name;
+            PainlessMethod m = method.getValue();
+
+            if (m.arguments.size() == 0 &&
+                name.startsWith("get") &&
+                name.length() > 3 &&
+                Character.isUpperCase(name.charAt(3))) {
+                StringBuilder newName = new StringBuilder();
+                newName.append(Character.toLowerCase(name.charAt(3)));
+                newName.append(name.substring(4));
+                struct.getters.putIfAbsent(newName.toString(), m.handle);
+            } else if (m.arguments.size() == 0 &&
+                name.startsWith("is") &&
+                name.length() > 2 &&
+                Character.isUpperCase(name.charAt(2))) {
+                StringBuilder newName = new StringBuilder();
+                newName.append(Character.toLowerCase(name.charAt(2)));
+                newName.append(name.substring(3));
+                struct.getters.putIfAbsent(newName.toString(), m.handle);
+            }
+
+            if (m.arguments.size() == 1 &&
+                name.startsWith("set") &&
+                name.length() > 3 &&
+                Character.isUpperCase(name.charAt(3))) {
+                StringBuilder newName = new StringBuilder();
+                newName.append(Character.toLowerCase(name.charAt(3)));
+                newName.append(name.substring(4));
+                struct.setters.putIfAbsent(newName.toString(), m.handle);
+            }
+        }
+
+        // add all members
+        for (Map.Entry member : struct.members.entrySet()) {
+            struct.getters.put(member.getKey(), member.getValue().getter);
+            struct.setters.put(member.getKey(), member.getValue().setter);
+        }
+    }
+
+    /** computes the functional interface method for a class, or returns null */
+    private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) {
+        if (!clazz.clazz.isInterface()) {
+            return null;
+        }
+        // if its marked with this annotation, we fail if the conditions don't hold (means whitelist bug)
+        // otherwise, this annotation is pretty useless.
+        boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);
+        List methods = new ArrayList<>();
+        for (java.lang.reflect.Method m : clazz.clazz.getMethods()) {
+            // default interface methods don't count
+            if (m.isDefault()) {
+                continue;
+            }
+            // static methods don't count
+            if (Modifier.isStatic(m.getModifiers())) {
+                continue;
+            }
+            // if its from Object, it doesn't count
+            try {
+                Object.class.getMethod(m.getName(), m.getParameterTypes());
+                continue;
+            } catch (ReflectiveOperationException e) {
+                // it counts
+            }
+            methods.add(m);
+        }
+        if (methods.size() != 1) {
+            if (hasAnnotation) {
+                throw new IllegalArgumentException("Class: " + clazz.name +
+                    " is marked with FunctionalInterface but doesn't fit the bill: " + methods);
+            }
+            return null;
+        }
+        // inspect the one method found from the reflection API, it should match the whitelist!
+        java.lang.reflect.Method oneMethod = methods.get(0);
+        PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));
+        if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
+            throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
+                "method is not whitelisted!");
+        }
+        return painless;
+    }
+
+    public Class getJavaClassFromPainlessType(String painlessType) {
+        Class javaClass = painlessTypesToJavaClasses.get(painlessType);
+
+        if (javaClass != null) {
+            return javaClass;
+        }
+        int arrayDimensions = 0;
+        int arrayIndex = painlessType.indexOf('[');
+
+        if (arrayIndex != -1) {
+            int length = painlessType.length();
+
+            while (arrayIndex < length) {
+                if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') {
+                    ++arrayDimensions;
+                } else {
+                    throw new IllegalArgumentException("invalid painless type [" + painlessType + "].");
+                }
+            }
+
+            painlessType = painlessType.substring(0, painlessType.indexOf('['));
+            javaClass = painlessTypesToJavaClasses.get(painlessType);
+
+            char braces[] = new char[arrayDimensions];
+            Arrays.fill(braces, '[');
+            String descriptor = new String(braces);
+
+            if (javaClass == boolean.class) {
+                descriptor += "Z";
+            } else if (javaClass == byte.class) {
+                descriptor += "B";
+            } else if (javaClass == short.class) {
+                descriptor += "S";
+            } else if (javaClass == char.class) {
+                descriptor += "C";
+            } else if (javaClass == int.class) {
+                descriptor += "I";
+            } else if (javaClass == long.class) {
+                descriptor += "J";
+            } else if (javaClass == float.class) {
+                descriptor += "F";
+            } else if (javaClass == double.class) {
+                descriptor += "D";
+            } else {
+                descriptor += "L" + javaClass.getName() + ";";
+            }
+
+            try {
+                return Class.forName(descriptor);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe);
+            }
+        }
+
+        throw new IllegalArgumentException("invalid painless type [" + painlessType + "]");
+    }
+
+    public PainlessLookup build() {
+        return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs);
+    }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java
new file mode 100644
index 00000000000..4336236be3f
--- /dev/null
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless.lookup;
+
+/** Marker class for def type to be used during type analysis. */
+public final class def {
+
+    private def() {
+
+    }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
index dda246b5f6c..a0a29ed59dd 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
@@ -23,7 +23,7 @@ package org.elasticsearch.painless.node;
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessCast;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
index 46fbeefd6f5..422300072dc 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
@@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
index a3e1b4bde6a..c0345b6308c 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
@@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.FunctionRef;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java
index c0fccab8e8a..806204d051a 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java
@@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
index a7b7a41fe05..8977f4f0ef3 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
@@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessMethod;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.FunctionRef;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
index 518f1953525..820cce685ed 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
@@ -21,7 +21,7 @@ package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
index 45158aedcf7..b6c7fb80af9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
@@ -21,7 +21,7 @@ package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
index 8e293556eac..3a5102ebdc9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
@@ -22,7 +22,7 @@ package org.elasticsearch.painless.node;
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
index ec7d0f6d7bb..5b282abdce9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
index 12ff4832483..f23ae9f1887 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
@@ -23,7 +23,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.lookup.PainlessClass;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
index 8d27162fc36..78a18b91ab2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
@@ -23,7 +23,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessField;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessClass;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
index 8e30d434329..ccbc25db4f2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.DefBootstrap;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
index 0882f191770..a9021000e2d 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.DefBootstrap;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
index 41fcf563d24..1c081c9422e 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.DefBootstrap;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
index e7d18ece059..c402d8982d8 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java
index faee2ed74a6..cfc87536b6b 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java
@@ -25,7 +25,7 @@ import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessCast;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
index 78e5814e963..c0e0bd7ed9d 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
@@ -24,6 +24,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 
 import static java.util.Collections.emptyMap;
@@ -37,7 +38,7 @@ import static org.hamcrest.Matchers.startsWith;
  */
 public class BaseClassTests extends ScriptTestCase {
 
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     public abstract static class Gets {
 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java
index 987eef31eee..7edc90bb0a0 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java
@@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ScriptException;
 
@@ -36,7 +37,7 @@ import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.not;
 
 public class DebugTests extends ScriptTestCase {
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     public void testExplain() {
         // Debug.explain can explain an object
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java
index 0d5e2748b7b..73adf92779d 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.painless;
 
-import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.objectweb.asm.util.Textifier;
 
@@ -40,7 +40,7 @@ final class Debugger {
         PrintWriter outputWriter = new PrintWriter(output);
         Textifier textifier = new Textifier();
         try {
-            new Compiler(iface, new PainlessLookup(Whitelist.BASE_WHITELISTS))
+            new Compiler(iface, new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build())
                     .compile("", source, settings, textifier);
         } catch (RuntimeException e) {
             textifier.print(outputWriter);
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java
index ab4844dd58b..07f45ff67c0 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java
@@ -28,11 +28,12 @@ import java.util.Collections;
 import java.util.HashMap;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.test.ESTestCase;
 
 public class DefBootstrapTests extends ESTestCase {
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     /** calls toString() on integers, twice */
     public void testOneType() throws Throwable {
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java
index 5177d64cbdb..5e8e6ad47d8 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java
@@ -20,13 +20,16 @@
 package org.elasticsearch.painless;
 
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessField;
-import org.elasticsearch.painless.lookup.PainlessMethod;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.painless.lookup.PainlessClass;
+import org.elasticsearch.painless.lookup.PainlessField;
+import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
+import org.elasticsearch.painless.lookup.PainlessMethod;
+import org.elasticsearch.painless.spi.Whitelist;
+
 import java.io.IOException;
 import java.io.PrintStream;
 import java.lang.reflect.Modifier;
@@ -42,14 +45,13 @@ import java.util.function.Consumer;
 
 import static java.util.Comparator.comparing;
 import static java.util.stream.Collectors.toList;
-import static org.elasticsearch.painless.spi.Whitelist.BASE_WHITELISTS;
 
 /**
  * Generates an API reference from the method and type whitelists in {@link PainlessLookup}.
  */
 public class PainlessDocGenerator {
 
-    private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookup(BASE_WHITELISTS);
+    private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
     private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class);
     private static final Comparator FIELD_NAME = comparing(f -> f.name);
     private static final Comparator METHOD_NAME = comparing(m -> m.name);
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
index 1a4770e560a..eebf1d701ee 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.lucene.ScorerAware;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.painless.antlr.Walker;
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptContext;
@@ -91,7 +92,7 @@ public abstract class ScriptTestCase extends ESTestCase {
     public Object exec(String script, Map vars, Map compileParams, Scorer scorer, boolean picky) {
         // test for ambiguity errors before running the actual script if picky is true
         if (picky) {
-            PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+            PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
             ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class);
             CompilerSettings pickySettings = new CompilerSettings();
             pickySettings.setPicky(true);
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
index 3e9f724743f..86d365e0fcc 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
@@ -23,6 +23,7 @@ import org.elasticsearch.painless.CompilerSettings;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessCast;
 import org.elasticsearch.painless.lookup.PainlessField;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.lookup.PainlessClass;
@@ -48,7 +49,7 @@ import static org.elasticsearch.painless.node.SSource.MainMethodReserved;
  * Tests {@link Object#toString} implementations on all extensions of {@link ANode}.
  */
 public class NodeToStringTests extends ESTestCase {
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     public void testEAssignment() {
         assertToString(

From ecd05d5be408d5b864189c1c51b9154d627e536b Mon Sep 17 00:00:00 2001
From: DeDe Morton 
Date: Mon, 16 Jul 2018 12:11:24 -0700
Subject: [PATCH 039/260] Use correct formatting for links (#29460)

---
 docs/reference/how-to/recipes.asciidoc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/reference/how-to/recipes.asciidoc b/docs/reference/how-to/recipes.asciidoc
index e798f8819d0..451e192ad6a 100644
--- a/docs/reference/how-to/recipes.asciidoc
+++ b/docs/reference/how-to/recipes.asciidoc
@@ -3,8 +3,8 @@
 
 This section includes a few recipes to help with common problems:
 
-* mixing-exact-search-with-stemming
-* consistent-scoring
+* <>
+* <>
 
 include::recipes/stemming.asciidoc[]
 include::recipes/scoring.asciidoc[]

From 637cac90614018d6e46c781547ddac2000d71459 Mon Sep 17 00:00:00 2001
From: Michael Basnight 
Date: Mon, 16 Jul 2018 14:20:16 -0500
Subject: [PATCH 040/260] Watcher: Store username on watch execution (#31873)

There is currently no way to see what user executed a watch. This commit
adds the decrypted username to each execution in the watch history, in a
new field "user".

Closes #31772
---
 .../rest-api/watcher/execute-watch.asciidoc   |  4 +-
 .../core/security/authc/Authentication.java   | 10 +++-
 .../execution/WatchExecutionContext.java      | 25 +++++++++
 .../core/watcher/history/WatchRecord.java     | 17 ++++--
 .../WatcherIndexTemplateRegistryField.java    |  3 +-
 .../src/main/resources/watch-history.json     |  3 +
 .../execution/ExecutionServiceTests.java      | 31 ++++++++++
 .../roles.yml                                 |  1 +
 .../20_test_run_as_execute_watch.yml          | 56 +++++++++++++++++++
 9 files changed, 140 insertions(+), 10 deletions(-)

diff --git a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc b/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc
index 91cd89bca6d..ec2c60c543b 100644
--- a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc
+++ b/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc
@@ -263,7 +263,8 @@ This is an example of the output:
           "type": "index"
         }
       ]
-    }
+    },
+    "user": "test_admin" <4>
   }
 }
 --------------------------------------------------
@@ -281,6 +282,7 @@ This is an example of the output:
 <1> The id of the watch record as it would be stored in the `.watcher-history` index.
 <2> The watch record document as it would be stored in the `.watcher-history` index.
 <3> The watch execution results.
+<4> The user used to execute the watch.
 
 You can set a different execution mode for every action by associating the mode
 name with the action id:
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java
index 2a2fdd95d61..161d9d44999 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java
@@ -88,13 +88,17 @@ public class Authentication {
             throws IOException, IllegalArgumentException {
         assert ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY) == null;
 
+        Authentication authentication = decode(header);
+        ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
+        return authentication;
+    }
+
+    public static Authentication decode(String header) throws IOException {
         byte[] bytes = Base64.getDecoder().decode(header);
         StreamInput input = StreamInput.wrap(bytes);
         Version version = Version.readVersion(input);
         input.setVersion(version);
-        Authentication authentication = new Authentication(input);
-        ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
-        return authentication;
+        return new Authentication(input);
     }
 
     /**
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java
index 62216ff681e..dbbff33dcef 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java
@@ -8,6 +8,8 @@ package org.elasticsearch.xpack.core.watcher.execution;
 import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
 import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult;
 import org.elasticsearch.xpack.core.watcher.condition.Condition;
 import org.elasticsearch.xpack.core.watcher.history.WatchRecord;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.core.watcher.watch.Payload;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.joda.time.DateTime;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -43,6 +46,7 @@ public abstract class WatchExecutionContext {
     private Transform.Result transformResult;
     private ConcurrentMap actionsResults = ConcurrentCollections.newConcurrentMap();
     private String nodeId;
+    private String user;
 
     public WatchExecutionContext(String watchId, DateTime executionTime, TriggerEvent triggerEvent, TimeValue defaultThrottlePeriod) {
         this.id = new Wid(watchId, executionTime);
@@ -85,6 +89,7 @@ public abstract class WatchExecutionContext {
     public final void ensureWatchExists(CheckedSupplier supplier) throws Exception {
         if (watch == null) {
             watch = supplier.get();
+            user = WatchExecutionContext.getUsernameFromWatch(watch);
         }
     }
 
@@ -137,6 +142,11 @@ public abstract class WatchExecutionContext {
         return nodeId;
     }
 
+    /**
+     * @return The user that executes the watch, which will be stored in the watch history
+     */
+    public String getUser() { return user; }
+
     public void start() {
         assert phase == ExecutionPhase.AWAITS_EXECUTION;
         relativeStartTime = System.nanoTime();
@@ -243,4 +253,19 @@ public abstract class WatchExecutionContext {
     public WatchExecutionSnapshot createSnapshot(Thread executionThread) {
         return new WatchExecutionSnapshot(this, executionThread.getStackTrace());
     }
+
+    /**
+     * Given a watch, this extracts and decodes the relevant auth header and returns the principal of the user that is
+     * executing the watch.
+     */
+    public static String getUsernameFromWatch(Watch watch) throws IOException {
+        if (watch != null && watch.status() != null && watch.status().getHeaders() != null) {
+            String header = watch.status().getHeaders().get(AuthenticationField.AUTHENTICATION_KEY);
+            if (header != null) {
+                Authentication auth = Authentication.decode(header);
+                return auth.getUser().principal();
+            }
+        }
+        return null;
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java
index 74e7b2115fa..2b28c2f15c9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java
@@ -43,12 +43,14 @@ public abstract class WatchRecord implements ToXContentObject {
     private static final ParseField METADATA = new ParseField("metadata");
     private static final ParseField EXECUTION_RESULT = new ParseField("result");
     private static final ParseField EXCEPTION = new ParseField("exception");
+    private static final ParseField USER = new ParseField("user");
 
     protected final Wid id;
     protected final Watch watch;
     private final String nodeId;
     protected final TriggerEvent triggerEvent;
     protected final ExecutionState state;
+    private final String user;
 
     // only emitted to xcontent in "debug" mode
     protected final Map vars;
@@ -60,7 +62,7 @@ public abstract class WatchRecord implements ToXContentObject {
 
     private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, Map vars, ExecutableInput input,
                         ExecutableCondition condition, Map metadata, Watch watch, WatchExecutionResult executionResult,
-                        String nodeId) {
+                        String nodeId, String user) {
         this.id = id;
         this.triggerEvent = triggerEvent;
         this.state = state;
@@ -71,15 +73,16 @@ public abstract class WatchRecord implements ToXContentObject {
         this.executionResult = executionResult;
         this.watch = watch;
         this.nodeId = nodeId;
+        this.user = user;
     }
 
     private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, String nodeId) {
-        this(id, triggerEvent, state, Collections.emptyMap(), null, null, null, null, null, nodeId);
+        this(id, triggerEvent, state, Collections.emptyMap(), null, null, null, null, null, nodeId, null);
     }
 
     private WatchRecord(WatchRecord record, ExecutionState state) {
         this(record.id, record.triggerEvent, state, record.vars, record.input, record.condition, record.metadata, record.watch,
-                record.executionResult, record.nodeId);
+                record.executionResult, record.nodeId, record.user);
     }
 
     private WatchRecord(WatchExecutionContext context, ExecutionState state) {
@@ -88,12 +91,13 @@ public abstract class WatchRecord implements ToXContentObject {
                 context.watch() != null ? context.watch().condition() : null,
                 context.watch() != null ? context.watch().metadata() : null,
                 context.watch(),
-                null, context.getNodeId());
+                null, context.getNodeId(), context.getUser());
     }
 
     private WatchRecord(WatchExecutionContext context, WatchExecutionResult executionResult) {
         this(context.id(), context.triggerEvent(), getState(executionResult), context.vars(), context.watch().input(),
-                context.watch().condition(), context.watch().metadata(), context.watch(), executionResult, context.getNodeId());
+                context.watch().condition(), context.watch().metadata(), context.watch(), executionResult, context.getNodeId(),
+                context.getUser());
     }
 
     public static ExecutionState getState(WatchExecutionResult executionResult) {
@@ -152,6 +156,9 @@ public abstract class WatchRecord implements ToXContentObject {
         builder.field(NODE.getPreferredName(), nodeId);
         builder.field(STATE.getPreferredName(), state.id());
 
+        if (user != null) {
+            builder.field(USER.getPreferredName(), user);
+        }
         if (watch != null && watch.status() != null) {
             builder.field(STATUS.getPreferredName(), watch.status(), params);
         }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java
index 25e2c928d9a..b42506b81b3 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java
@@ -13,8 +13,9 @@ public final class WatcherIndexTemplateRegistryField {
     // version 6: upgrade to ES 6, removal of _status field
     // version 7: add full exception stack traces for better debugging
     // version 8: fix slack attachment property not to be dynamic, causing field type issues
+    // version 9: add a user field defining which user executed the watch
     // Note: if you change this, also inform the kibana team around the watcher-ui
-    public static final String INDEX_TEMPLATE_VERSION = "8";
+    public static final String INDEX_TEMPLATE_VERSION = "9";
     public static final String HISTORY_TEMPLATE_NAME = ".watch-history-" + INDEX_TEMPLATE_VERSION;
     public static final String TRIGGERED_TEMPLATE_NAME = ".triggered_watches";
     public static final String WATCHES_TEMPLATE_NAME = ".watches";
diff --git a/x-pack/plugin/core/src/main/resources/watch-history.json b/x-pack/plugin/core/src/main/resources/watch-history.json
index 86a967fc14f..9a4a96409b0 100644
--- a/x-pack/plugin/core/src/main/resources/watch-history.json
+++ b/x-pack/plugin/core/src/main/resources/watch-history.json
@@ -120,6 +120,9 @@
         "messages": {
           "type": "text"
         },
+        "user": {
+          "type": "text"
+        },
         "exception" : {
           "type" : "object",
           "enabled" : false
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java
index 73f0e820720..d3f46d3d452 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java
@@ -31,6 +31,9 @@ import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
+import org.elasticsearch.xpack.core.security.user.User;
 import org.elasticsearch.xpack.core.watcher.actions.Action;
 import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
 import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper;
@@ -85,6 +88,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import static java.util.Arrays.asList;
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
@@ -1072,6 +1076,33 @@ public class ExecutionServiceTests extends ESTestCase {
         assertThat(watchRecord.state(), is(ExecutionState.EXECUTED));
     }
 
+    public void testLoadingWatchExecutionUser() throws Exception {
+        DateTime now = now(UTC);
+        Watch watch = mock(Watch.class);
+        WatchStatus status = mock(WatchStatus.class);
+        ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
+
+        // Should be null
+        TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5));
+        context.ensureWatchExists(() -> watch);
+        assertNull(context.getUser());
+
+        // Should still be null, header is not yet set
+        when(watch.status()).thenReturn(status);
+        context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5));
+        context.ensureWatchExists(() -> watch);
+        assertNull(context.getUser());
+
+        Authentication authentication = new Authentication(new User("joe", "admin"),
+            new Authentication.RealmRef("native_realm", "native", "node1"), null);
+
+        // Should no longer be null now that the proper header is set
+        when(status.getHeaders()).thenReturn(Collections.singletonMap(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
+        context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5));
+        context.ensureWatchExists(() -> watch);
+        assertThat(context.getUser(), equalTo("joe"));
+    }
+
     private WatchExecutionContext createMockWatchExecutionContext(String watchId, DateTime executionTime) {
         WatchExecutionContext ctx = mock(WatchExecutionContext.class);
         when(ctx.id()).thenReturn(new Wid(watchId, executionTime));
diff --git a/x-pack/qa/smoke-test-watcher-with-security/roles.yml b/x-pack/qa/smoke-test-watcher-with-security/roles.yml
index bebfa883fcb..b52fe6c5c59 100644
--- a/x-pack/qa/smoke-test-watcher-with-security/roles.yml
+++ b/x-pack/qa/smoke-test-watcher-with-security/roles.yml
@@ -21,6 +21,7 @@ watcher_manager:
   run_as:
     - powerless_user
     - watcher_manager
+    - x_pack_rest_user
 
 watcher_monitor:
   cluster:
diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml b/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml
index 9bc7724b2c0..7a0634f5187 100644
--- a/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml
+++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml
@@ -74,10 +74,63 @@ teardown:
         id: "my_watch"
   - match: { watch_record.watch_id: "my_watch" }
   - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "watcher_manager" }
 
 
 
 
+---
+"Test watch is runas user properly recorded":
+  - do:
+      xpack.watcher.put_watch:
+        id: "my_watch"
+        body:  >
+          {
+            "trigger": {
+              "schedule" : { "cron" : "0 0 0 1 * ? 2099" }
+            },
+            "input": {
+              "search" : {
+                "request" : {
+                  "indices" : [ "my_test_index" ],
+                  "body" :{
+                    "query" : { "match_all": {} }
+                  }
+                }
+              }
+            },
+            "condition" : {
+              "compare" : {
+                "ctx.payload.hits.total" : {
+                  "gte" : 1
+                }
+              }
+            },
+            "actions": {
+              "logging": {
+                "logging": {
+                  "text": "Successfully ran my_watch to test for search input"
+                }
+              }
+            }
+          }
+  - match: { _id: "my_watch" }
+
+  - do:
+      xpack.watcher.get_watch:
+        id: "my_watch"
+  - match: { _id: "my_watch" }
+  - is_false: watch.status.headers
+
+  - do:
+      headers: { es-security-runas-user: x_pack_rest_user }
+      xpack.watcher.execute_watch:
+        id: "my_watch"
+  - match: { watch_record.watch_id: "my_watch" }
+  - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "x_pack_rest_user" }
+
+
 ---
 "Test watch search input does not work against index user is not allowed to read":
 
@@ -130,6 +183,7 @@ teardown:
   - match: { watch_record.watch_id: "my_watch" }
   # because we are not allowed to read the index, there wont be any data
   - match: { watch_record.state: "execution_not_needed" }
+  - match: { watch_record.user: "watcher_manager" }
 
 
 ---
@@ -272,6 +326,7 @@ teardown:
         id: "my_watch"
   - match: { watch_record.watch_id: "my_watch" }
   - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "watcher_manager" }
 
   - do:
       get:
@@ -320,6 +375,7 @@ teardown:
         id: "my_watch"
   - match: { watch_record.watch_id: "my_watch" }
   - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "watcher_manager" }
 
   - do:
       get:

From cccc3f7a641f4e9cbe73adf968ffe1419c715a3f Mon Sep 17 00:00:00 2001
From: debadair 
Date: Mon, 16 Jul 2018 12:47:57 -0700
Subject: [PATCH 041/260] Tweaked Elasticsearch Service links for SEO

---
 docs/reference/setup/install.asciidoc | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc
index 4433ffb8c38..c0ebfb60fa7 100644
--- a/docs/reference/setup/install.asciidoc
+++ b/docs/reference/setup/install.asciidoc
@@ -3,10 +3,12 @@
 
 [float]
 === Hosted Elasticsearch
-Elasticsearch can be run on your own hardware or using our hosted
-Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
-available on AWS and GCP. You can
-https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+
+You can run Elasticsearch on your own hardware, or use our
+https://www.elastic.co/cloud/elasticsearch-service[hosted Elasticsearch Service]
+on Elastic Cloud. The Elasticsearch Service is available on both AWS and GCP.
+https://www.elastic.co/cloud/elasticsearch-service/signup[Try out the
+Elasticsearch Service for free].
 
 [float]
 === Installing Elasticsearch Yourself

From ff8260c234a6f522941528e82a11fcde6f181ce8 Mon Sep 17 00:00:00 2001
From: debadair 
Date: Mon, 16 Jul 2018 12:58:08 -0700
Subject: [PATCH 042/260] Tweaked Elasticsearch Service links for SEO

---
 docs/reference/getting-started.asciidoc | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index ff00c310a43..e2dae285688 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -106,10 +106,11 @@ With that out of the way, let's get started with the fun part...
 
 [TIP]
 ==============
-You can skip installation completely by using our hosted
-Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
-available on AWS and GCP. You can
-https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+You can skip having to install Elasticsearch by using our
+https://www.elastic.co/cloud/elasticsearch-service[hosted Elasticsearch Service]
+on Elastic Cloud. The Elasticsearch Service is available on both AWS and GCP.
+https://www.elastic.co/cloud/elasticsearch-service/signup[Try out the
+Elasticsearch Service for free].
 ==============
 
 Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed):

From 69c85331b0f574323994dc1ccb4f70ab0e122a4c Mon Sep 17 00:00:00 2001
From: Andy Bristol 
Date: Mon, 16 Jul 2018 13:02:53 -0700
Subject: [PATCH 043/260] [test] turn on host io cache for opensuse (#32053)

The hope is that this will resolve the problems with very slow I/O we're
seeing on this box in #30295
---
 Vagrantfile | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/Vagrantfile b/Vagrantfile
index 7322399fed5..de344e18183 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -115,6 +115,11 @@ Vagrant.configure(2) do |config|
   'opensuse-42'.tap do |box|
     config.vm.define box, define_opts do |config|
       config.vm.box = 'elastic/opensuse-42-x86_64'
+
+      # https://github.com/elastic/elasticsearch/issues/30295
+      config.vm.provider 'virtualbox' do |vbox|
+        vbox.customize ['storagectl', :id, '--name', 'SATA Controller', '--hostiocache', 'on']
+      end
       suse_common config, box
     end
   end

From 5b65e2ccbf66370524b66a11305ebd4555cad27b Mon Sep 17 00:00:00 2001
From: ahmedakef 
Date: Mon, 16 Jul 2018 22:00:57 +0200
Subject: [PATCH 044/260] DOCS: put LIMIT 10 to the SQL query (#32065)

Provides a more precise equivalent SQL query for the aggregation example in the getting started guide.
---
 docs/reference/getting-started.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index e2dae285688..b89021e1cfe 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -1122,7 +1122,7 @@ In SQL, the above aggregation is similar in concept to:
 
 [source,sh]
 --------------------------------------------------
-SELECT state, COUNT(*) FROM bank GROUP BY state ORDER BY COUNT(*) DESC
+SELECT state, COUNT(*) FROM bank GROUP BY state ORDER BY COUNT(*) DESC LIMIT 10;
 --------------------------------------------------
 
 And the response (partially shown):

From 20ea72e2f26ed0c4d21b6865820298a26091daa8 Mon Sep 17 00:00:00 2001
From: Costin Leau 
Date: Mon, 16 Jul 2018 23:42:44 +0300
Subject: [PATCH 045/260] SQL: allow LEFT and RIGHT as function names (#32066)

Due to the way ANTLR works, any declared tokens need to be accounted for
manually inside function names (otherwise a different rule gets applied).

Fix #32046
---
 x-pack/plugin/sql/src/main/antlr/SqlBase.g4   |    7 +-
 .../xpack/sql/parser/ExpressionBuilder.java   |    3 +-
 .../xpack/sql/parser/SqlBaseBaseListener.java |   12 +
 .../xpack/sql/parser/SqlBaseBaseVisitor.java  |    7 +
 .../xpack/sql/parser/SqlBaseListener.java     |   10 +
 .../xpack/sql/parser/SqlBaseParser.java       | 1706 +++++++++--------
 .../xpack/sql/parser/SqlBaseVisitor.java      |    6 +
 .../xpack/sql/parser/ExpressionTests.java     |    8 +
 .../xpack/sql/parser/SqlParserTests.java      |    6 +
 9 files changed, 961 insertions(+), 804 deletions(-)

diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
index 2c3288babd6..490a5486b42 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
@@ -243,7 +243,12 @@ functionExpression
     ;
     
 functionTemplate
-    : identifier '(' (setQuantifier? expression (',' expression)*)? ')'
+    : functionName '(' (setQuantifier? expression (',' expression)*)? ')'
+    ;
+functionName
+    : LEFT 
+    | RIGHT 
+    | identifier
     ;
     
 constant
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
index 66ec98ea53c..e202803b261 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
@@ -396,8 +396,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
     @Override
     public Function visitFunctionExpression(FunctionExpressionContext ctx) {
         FunctionTemplateContext template = ctx.functionTemplate();
-
-        String name = visitIdentifier(template.identifier());
+        String name = template.functionName().getText();
         boolean isDistinct = template.setQuantifier() != null && template.setQuantifier().DISTINCT() != null;
         UnresolvedFunction.ResolutionType resolutionType =
                 isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
index b353bcf6521..72c417992e3 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
@@ -803,6 +803,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
    * 

The default implementation does nothing.

*/ @Override public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void enterFunctionName(SqlBaseParser.FunctionNameContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void exitFunctionName(SqlBaseParser.FunctionNameContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index d40ae6daa6e..fd35ec421f6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -473,6 +473,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitFunctionName(SqlBaseParser.FunctionNameContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index 70fe777384c..18b2a21c02e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -745,6 +745,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#functionName}. + * @param ctx the parse tree + */ + void enterFunctionName(SqlBaseParser.FunctionNameContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#functionName}. + * @param ctx the parse tree + */ + void exitFunctionName(SqlBaseParser.FunctionNameContext ctx); /** * Enter a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. 
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 32a1b062fee..c91758dadbc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -44,10 +44,11 @@ class SqlBaseParser extends Parser { RULE_predicate = 24, RULE_pattern = 25, RULE_patternEscape = 26, RULE_valueExpression = 27, RULE_primaryExpression = 28, RULE_castExpression = 29, RULE_castTemplate = 30, RULE_extractExpression = 31, RULE_extractTemplate = 32, RULE_functionExpression = 33, - RULE_functionTemplate = 34, RULE_constant = 35, RULE_comparisonOperator = 36, - RULE_booleanValue = 37, RULE_dataType = 38, RULE_qualifiedName = 39, RULE_identifier = 40, - RULE_tableIdentifier = 41, RULE_quoteIdentifier = 42, RULE_unquoteIdentifier = 43, - RULE_number = 44, RULE_string = 45, RULE_nonReserved = 46; + RULE_functionTemplate = 34, RULE_functionName = 35, RULE_constant = 36, + RULE_comparisonOperator = 37, RULE_booleanValue = 38, RULE_dataType = 39, + RULE_qualifiedName = 40, RULE_identifier = 41, RULE_tableIdentifier = 42, + RULE_quoteIdentifier = 43, RULE_unquoteIdentifier = 44, RULE_number = 45, + RULE_string = 46, RULE_nonReserved = 47; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", @@ -56,9 +57,9 @@ class SqlBaseParser extends Parser { "relationPrimary", "expression", "booleanExpression", "predicated", "predicate", "pattern", "patternEscape", "valueExpression", "primaryExpression", "castExpression", "castTemplate", "extractExpression", "extractTemplate", "functionExpression", - "functionTemplate", "constant", "comparisonOperator", "booleanValue", - "dataType", "qualifiedName", "identifier", 
"tableIdentifier", "quoteIdentifier", - "unquoteIdentifier", "number", "string", "nonReserved" + "functionTemplate", "functionName", "constant", "comparisonOperator", + "booleanValue", "dataType", "qualifiedName", "identifier", "tableIdentifier", + "quoteIdentifier", "unquoteIdentifier", "number", "string", "nonReserved" }; private static final String[] _LITERAL_NAMES = { @@ -172,9 +173,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(94); + setState(96); statement(); - setState(95); + setState(97); match(EOF); } } @@ -219,9 +220,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(97); + setState(99); expression(); - setState(98); + setState(100); match(EOF); } } @@ -607,14 +608,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(209); + setState(211); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(100); + setState(102); query(); } break; @@ -622,27 +623,27 @@ class SqlBaseParser extends Parser { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(101); + setState(103); match(EXPLAIN); - setState(115); + setState(117); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(102); + setState(104); match(T__0); - setState(111); + setState(113); _errHandler.sync(this); _la = _input.LA(1); while (((((_la - 28)) & ~0x3f) == 0 && ((1L << (_la - 28)) & ((1L << (FORMAT - 28)) | (1L << (PLAN - 28)) | (1L << (VERIFY - 28)))) != 0)) { { - setState(109); + setState(111); switch (_input.LA(1)) { case PLAN: { - setState(103); + setState(105); match(PLAN); - setState(104); + setState(106); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << 
ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED))) != 0)) ) { @@ -654,9 +655,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(105); + setState(107); match(FORMAT); - setState(106); + setState(108); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -668,9 +669,9 @@ class SqlBaseParser extends Parser { break; case VERIFY: { - setState(107); + setState(109); match(VERIFY); - setState(108); + setState(110); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -678,16 +679,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(113); + setState(115); _errHandler.sync(this); _la = _input.LA(1); } - setState(114); + setState(116); match(T__1); } break; } - setState(117); + setState(119); statement(); } break; @@ -695,27 +696,27 @@ class SqlBaseParser extends Parser { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(118); + setState(120); match(DEBUG); - setState(130); + setState(132); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(119); + setState(121); match(T__0); - setState(126); + setState(128); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(124); + setState(126); switch (_input.LA(1)) { case PLAN: { - setState(120); + setState(122); match(PLAN); - setState(121); + setState(123); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -727,9 +728,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(122); + setState(124); match(FORMAT); - setState(123); + setState(125); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -743,16 +744,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - 
setState(128); + setState(130); _errHandler.sync(this); _la = _input.LA(1); } - setState(129); + setState(131); match(T__1); } break; } - setState(132); + setState(134); statement(); } break; @@ -760,24 +761,24 @@ class SqlBaseParser extends Parser { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(133); + setState(135); match(SHOW); - setState(134); + setState(136); match(TABLES); - setState(139); + setState(141); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(136); + setState(138); _la = _input.LA(1); if (_la==LIKE) { { - setState(135); + setState(137); match(LIKE); } } - setState(138); + setState(140); pattern(); } } @@ -788,28 +789,13 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(141); - match(SHOW); - setState(142); - match(COLUMNS); setState(143); - _la = _input.LA(1); - if ( !(_la==FROM || _la==IN) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + match(SHOW); setState(144); - tableIdentifier(); - } - break; - case 6: - _localctx = new ShowColumnsContext(_localctx); - enterOuterAlt(_localctx, 6); - { + match(COLUMNS); setState(145); _la = _input.LA(1); - if ( !(_la==DESC || _la==DESCRIBE) ) { + if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); @@ -818,28 +804,43 @@ class SqlBaseParser extends Parser { tableIdentifier(); } break; + case 6: + _localctx = new ShowColumnsContext(_localctx); + enterOuterAlt(_localctx, 6); + { + setState(147); + _la = _input.LA(1); + if ( !(_la==DESC || _la==DESCRIBE) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(148); + tableIdentifier(); + } + break; case 7: _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(147); + setState(149); match(SHOW); - setState(148); + 
setState(150); match(FUNCTIONS); - setState(153); + setState(155); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(150); + setState(152); _la = _input.LA(1); if (_la==LIKE) { { - setState(149); + setState(151); match(LIKE); } } - setState(152); + setState(154); pattern(); } } @@ -850,9 +851,9 @@ class SqlBaseParser extends Parser { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(155); + setState(157); match(SHOW); - setState(156); + setState(158); match(SCHEMAS); } break; @@ -860,9 +861,9 @@ class SqlBaseParser extends Parser { _localctx = new SysCatalogsContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(157); + setState(159); match(SYS); - setState(158); + setState(160); match(CATALOGS); } break; @@ -870,69 +871,69 @@ class SqlBaseParser extends Parser { _localctx = new SysTablesContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(159); + setState(161); match(SYS); - setState(160); + setState(162); match(TABLES); - setState(166); + setState(168); _la = _input.LA(1); if (_la==CATALOG) { { - setState(161); - match(CATALOG); setState(163); + match(CATALOG); + setState(165); _la = _input.LA(1); if (_la==LIKE) { { - setState(162); + setState(164); match(LIKE); } } - setState(165); + setState(167); ((SysTablesContext)_localctx).clusterPattern = pattern(); } } - setState(172); + setState(174); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(169); + setState(171); _la = _input.LA(1); if (_la==LIKE) { { - setState(168); + setState(170); match(LIKE); } } - setState(171); + setState(173); ((SysTablesContext)_localctx).tablePattern = pattern(); } } - setState(183); + setState(185); _la = _input.LA(1); if (_la==TYPE) { { - setState(174); + setState(176); match(TYPE); - 
setState(175); + setState(177); string(); - setState(180); + setState(182); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(176); + setState(178); match(T__2); - setState(177); + setState(179); string(); } } - setState(182); + setState(184); _errHandler.sync(this); _la = _input.LA(1); } @@ -945,55 +946,55 @@ class SqlBaseParser extends Parser { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(185); + setState(187); match(SYS); - setState(186); + setState(188); match(COLUMNS); - setState(189); + setState(191); _la = _input.LA(1); if (_la==CATALOG) { { - setState(187); + setState(189); match(CATALOG); - setState(188); + setState(190); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(196); + setState(198); _la = _input.LA(1); if (_la==TABLE) { { - setState(191); - match(TABLE); setState(193); + match(TABLE); + setState(195); _la = _input.LA(1); if (_la==LIKE) { { - setState(192); + setState(194); match(LIKE); } } - setState(195); + setState(197); ((SysColumnsContext)_localctx).indexPattern = pattern(); } } - setState(202); + setState(204); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(199); + setState(201); _la = _input.LA(1); if (_la==LIKE) { { - setState(198); + setState(200); match(LIKE); } } - setState(201); + setState(203); ((SysColumnsContext)_localctx).columnPattern = pattern(); } } @@ -1004,9 +1005,9 @@ class SqlBaseParser extends Parser { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 12); { - setState(204); + setState(206); match(SYS); - setState(205); + setState(207); match(TYPES); } break; @@ -1014,11 +1015,11 @@ class SqlBaseParser extends Parser { _localctx = new SysTableTypesContext(_localctx); enterOuterAlt(_localctx, 13); { - setState(206); - match(SYS); - setState(207); - match(TABLE); setState(208); + match(SYS); + 
setState(209); + match(TABLE); + setState(210); match(TYPES); } break; @@ -1072,34 +1073,34 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(220); + setState(222); _la = _input.LA(1); if (_la==WITH) { { - setState(211); + setState(213); match(WITH); - setState(212); + setState(214); namedQuery(); - setState(217); + setState(219); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(213); + setState(215); match(T__2); - setState(214); + setState(216); namedQuery(); } } - setState(219); + setState(221); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(222); + setState(224); queryNoWith(); } } @@ -1155,42 +1156,42 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(224); + setState(226); queryTerm(); - setState(235); + setState(237); _la = _input.LA(1); if (_la==ORDER) { { - setState(225); - match(ORDER); - setState(226); - match(BY); setState(227); + match(ORDER); + setState(228); + match(BY); + setState(229); orderBy(); - setState(232); + setState(234); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(228); + setState(230); match(T__2); - setState(229); + setState(231); orderBy(); } } - setState(234); + setState(236); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(238); + setState(240); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { - setState(237); + setState(239); limitClause(); } } @@ -1239,14 +1240,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 10, RULE_limitClause); int _la; try { - setState(245); + setState(247); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); { - setState(240); + setState(242); match(LIMIT); - setState(241); + setState(243); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1259,9 +1260,9 @@ class SqlBaseParser extends Parser { case LIMIT_ESC: enterOuterAlt(_localctx, 2); { 
- setState(242); + setState(244); match(LIMIT_ESC); - setState(243); + setState(245); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1269,7 +1270,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(244); + setState(246); match(ESC_END); } break; @@ -1342,13 +1343,13 @@ class SqlBaseParser extends Parser { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 12, RULE_queryTerm); try { - setState(252); + setState(254); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(247); + setState(249); querySpecification(); } break; @@ -1356,11 +1357,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(248); - match(T__0); - setState(249); - queryNoWith(); setState(250); + match(T__0); + setState(251); + queryNoWith(); + setState(252); match(T__1); } break; @@ -1412,13 +1413,13 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(254); - expression(); setState(256); + expression(); + setState(258); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(255); + setState(257); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1497,75 +1498,75 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(258); - match(SELECT); setState(260); + match(SELECT); + setState(262); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(259); + setState(261); setQuantifier(); } } - setState(262); + setState(264); selectItem(); - setState(267); + setState(269); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(263); + setState(265); match(T__2); - setState(264); + setState(266); selectItem(); } } - setState(269); + setState(271); 
_errHandler.sync(this); _la = _input.LA(1); } - setState(271); + setState(273); _la = _input.LA(1); if (_la==FROM) { { - setState(270); + setState(272); fromClause(); } } - setState(275); + setState(277); _la = _input.LA(1); if (_la==WHERE) { { - setState(273); + setState(275); match(WHERE); - setState(274); + setState(276); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(280); + setState(282); _la = _input.LA(1); if (_la==GROUP) { { - setState(277); - match(GROUP); - setState(278); - match(BY); setState(279); + match(GROUP); + setState(280); + match(BY); + setState(281); groupBy(); } } - setState(284); + setState(286); _la = _input.LA(1); if (_la==HAVING) { { - setState(282); + setState(284); match(HAVING); - setState(283); + setState(285); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1617,23 +1618,23 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(286); + setState(288); match(FROM); - setState(287); + setState(289); relation(); - setState(292); + setState(294); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(288); + setState(290); match(T__2); - setState(289); + setState(291); relation(); } } - setState(294); + setState(296); _errHandler.sync(this); _la = _input.LA(1); } @@ -1686,30 +1687,30 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(296); + setState(298); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(295); + setState(297); setQuantifier(); } } - setState(298); + setState(300); groupingElement(); - setState(303); + setState(305); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(299); + setState(301); match(T__2); - setState(300); + setState(302); groupingElement(); } } - setState(305); + setState(307); _errHandler.sync(this); _la = _input.LA(1); } @@ -1764,7 +1765,7 @@ class SqlBaseParser extends Parser { _localctx = new 
SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(306); + setState(308); groupingExpressions(); } } @@ -1810,47 +1811,47 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(321); + setState(323); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(308); + setState(310); match(T__0); - setState(317); + setState(319); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L 
<< GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(309); + setState(311); expression(); - setState(314); + setState(316); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(310); + setState(312); match(T__2); - setState(311); + setState(313); expression(); } } - setState(316); + setState(318); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(319); + setState(321); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(320); + setState(322); expression(); } break; @@ -1901,15 +1902,15 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(323); - ((NamedQueryContext)_localctx).name = identifier(); - setState(324); - match(AS); setState(325); - match(T__0); + ((NamedQueryContext)_localctx).name = identifier(); setState(326); - queryNoWith(); + match(AS); setState(327); + match(T__0); + setState(328); + queryNoWith(); + setState(329); match(T__1); } } @@ -1953,7 +1954,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(329); + setState(331); _la = _input.LA(1); if ( !(_la==ALL || 
_la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -2016,22 +2017,22 @@ class SqlBaseParser extends Parser { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(331); + setState(333); expression(); - setState(336); + setState(338); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(333); + setState(335); _la = _input.LA(1); if (_la==AS) { { - setState(332); + setState(334); match(AS); } } - setState(335); + setState(337); identifier(); } } @@ -2085,19 +2086,19 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(338); + setState(340); relationPrimary(); - setState(342); + setState(344); _errHandler.sync(this); _la = _input.LA(1); while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FULL) | (1L << INNER) | (1L << JOIN) | (1L << LEFT) | (1L << NATURAL) | (1L << RIGHT))) != 0)) { { { - setState(339); + setState(341); joinRelation(); } } - setState(344); + setState(346); _errHandler.sync(this); _la = _input.LA(1); } @@ -2151,7 +2152,7 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 34, RULE_joinRelation); int _la; try { - setState(356); + setState(358); switch (_input.LA(1)) { case FULL: case INNER: @@ -2161,18 +2162,18 @@ class SqlBaseParser extends Parser { 
enterOuterAlt(_localctx, 1); { { - setState(345); + setState(347); joinType(); } - setState(346); + setState(348); match(JOIN); - setState(347); - ((JoinRelationContext)_localctx).right = relationPrimary(); setState(349); + ((JoinRelationContext)_localctx).right = relationPrimary(); + setState(351); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(348); + setState(350); joinCriteria(); } } @@ -2182,13 +2183,13 @@ class SqlBaseParser extends Parser { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(351); - match(NATURAL); - setState(352); - joinType(); setState(353); - match(JOIN); + match(NATURAL); setState(354); + joinType(); + setState(355); + match(JOIN); + setState(356); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2237,17 +2238,17 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 36, RULE_joinType); int _la; try { - setState(373); + setState(375); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(359); + setState(361); _la = _input.LA(1); if (_la==INNER) { { - setState(358); + setState(360); match(INNER); } } @@ -2257,13 +2258,13 @@ class SqlBaseParser extends Parser { case LEFT: enterOuterAlt(_localctx, 2); { - setState(361); - match(LEFT); setState(363); + match(LEFT); + setState(365); _la = _input.LA(1); if (_la==OUTER) { { - setState(362); + setState(364); match(OUTER); } } @@ -2273,13 +2274,13 @@ class SqlBaseParser extends Parser { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(365); - match(RIGHT); setState(367); + match(RIGHT); + setState(369); _la = _input.LA(1); if (_la==OUTER) { { - setState(366); + setState(368); match(OUTER); } } @@ -2289,13 +2290,13 @@ class SqlBaseParser extends Parser { case FULL: enterOuterAlt(_localctx, 4); { - setState(369); - match(FULL); setState(371); + match(FULL); + setState(373); _la = _input.LA(1); if (_la==OUTER) { { - setState(370); + setState(372); match(OUTER); } } @@ -2353,43 +2354,43 @@ class SqlBaseParser 
extends Parser { enterRule(_localctx, 38, RULE_joinCriteria); int _la; try { - setState(389); + setState(391); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(375); + setState(377); match(ON); - setState(376); + setState(378); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(377); - match(USING); - setState(378); - match(T__0); setState(379); + match(USING); + setState(380); + match(T__0); + setState(381); identifier(); - setState(384); + setState(386); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(380); + setState(382); match(T__2); - setState(381); + setState(383); identifier(); } } - setState(386); + setState(388); _errHandler.sync(this); _la = _input.LA(1); } - setState(387); + setState(389); match(T__1); } break; @@ -2494,29 +2495,29 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 40, RULE_relationPrimary); int _la; try { - setState(416); + setState(418); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(391); + setState(393); tableIdentifier(); - setState(396); + setState(398); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - 
setState(393); + setState(395); _la = _input.LA(1); if (_la==AS) { { - setState(392); + setState(394); match(AS); } } - setState(395); + setState(397); qualifiedName(); } } @@ -2527,26 +2528,26 @@ class SqlBaseParser extends Parser { _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(398); - match(T__0); - setState(399); - queryNoWith(); setState(400); + match(T__0); + setState(401); + queryNoWith(); + setState(402); match(T__1); - setState(405); + setState(407); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(402); + setState(404); _la = _input.LA(1); if (_la==AS) { { - setState(401); + setState(403); match(AS); } } - setState(404); + setState(406); qualifiedName(); } } @@ -2557,26 +2558,26 @@ class SqlBaseParser extends Parser { _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(407); - match(T__0); - setState(408); - relation(); setState(409); + match(T__0); + setState(410); + relation(); + setState(411); match(T__1); - setState(414); + setState(416); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << 
FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(411); + setState(413); _la = _input.LA(1); if (_la==AS) { { - setState(410); + setState(412); match(AS); } } - setState(413); + setState(415); qualifiedName(); } } @@ -2625,7 +2626,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(418); + setState(420); booleanExpression(0); } } @@ -2834,7 +2835,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(469); + setState(471); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: @@ -2843,9 +2844,9 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(421); + setState(423); match(NOT); - setState(422); + setState(424); booleanExpression(8); } break; @@ -2854,13 +2855,13 @@ class SqlBaseParser extends Parser { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(423); - match(EXISTS); - setState(424); - match(T__0); setState(425); - query(); + match(EXISTS); setState(426); + match(T__0); + setState(427); + query(); + setState(428); match(T__1); } break; @@ -2869,29 +2870,29 @@ class SqlBaseParser extends Parser { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(428); - match(QUERY); - setState(429); - match(T__0); setState(430); + match(QUERY); + setState(431); + match(T__0); + setState(432); ((StringQueryContext)_localctx).queryString = string(); - 
setState(435); + setState(437); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(431); + setState(433); match(T__2); - setState(432); + setState(434); ((StringQueryContext)_localctx).options = string(); } } - setState(437); + setState(439); _errHandler.sync(this); _la = _input.LA(1); } - setState(438); + setState(440); match(T__1); } break; @@ -2900,33 +2901,33 @@ class SqlBaseParser extends Parser { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(440); - match(MATCH); - setState(441); - match(T__0); setState(442); - ((MatchQueryContext)_localctx).singleField = qualifiedName(); + match(MATCH); setState(443); - match(T__2); + match(T__0); setState(444); + ((MatchQueryContext)_localctx).singleField = qualifiedName(); + setState(445); + match(T__2); + setState(446); ((MatchQueryContext)_localctx).queryString = string(); - setState(449); + setState(451); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(445); + setState(447); match(T__2); - setState(446); + setState(448); ((MatchQueryContext)_localctx).options = string(); } } - setState(451); + setState(453); _errHandler.sync(this); _la = _input.LA(1); } - setState(452); + setState(454); match(T__1); } break; @@ -2935,33 +2936,33 @@ class SqlBaseParser extends Parser { _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(454); - match(MATCH); - setState(455); - match(T__0); setState(456); - ((MultiMatchQueryContext)_localctx).multiFields = string(); + match(MATCH); setState(457); - match(T__2); + match(T__0); setState(458); + ((MultiMatchQueryContext)_localctx).multiFields = string(); + setState(459); + match(T__2); + setState(460); ((MultiMatchQueryContext)_localctx).queryString = string(); - setState(463); + setState(465); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(459); + setState(461); match(T__2); - setState(460); + 
setState(462); ((MultiMatchQueryContext)_localctx).options = string(); } } - setState(465); + setState(467); _errHandler.sync(this); _la = _input.LA(1); } - setState(466); + setState(468); match(T__1); } break; @@ -2970,13 +2971,13 @@ class SqlBaseParser extends Parser { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(468); + setState(470); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(479); + setState(481); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,66,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -2984,7 +2985,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(477); + setState(479); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,65,_ctx) ) { case 1: @@ -2992,11 +2993,11 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(471); - if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(472); - ((LogicalBinaryContext)_localctx).operator = match(AND); setState(473); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(474); + ((LogicalBinaryContext)_localctx).operator = match(AND); + setState(475); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -3005,18 +3006,18 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(474); - if (!(precpred(_ctx, 1))) throw new 
FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(475); - ((LogicalBinaryContext)_localctx).operator = match(OR); setState(476); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(477); + ((LogicalBinaryContext)_localctx).operator = match(OR); + setState(478); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(481); + setState(483); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,66,_ctx); } @@ -3065,14 +3066,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(482); - valueExpression(0); setState(484); + valueExpression(0); + setState(486); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,67,_ctx) ) { case 1: { - setState(483); + setState(485); predicate(); } break; @@ -3148,142 +3149,142 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 48, RULE_predicate); int _la; try { - setState(532); + setState(534); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,75,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(487); + setState(489); _la = _input.LA(1); if (_la==NOT) { { - setState(486); + setState(488); match(NOT); } } - setState(489); - ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(490); - ((PredicateContext)_localctx).lower = valueExpression(0); setState(491); - match(AND); + ((PredicateContext)_localctx).kind = match(BETWEEN); setState(492); + ((PredicateContext)_localctx).lower = valueExpression(0); + setState(493); + match(AND); + setState(494); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(495); + setState(497); _la = _input.LA(1); if (_la==NOT) { { - setState(494); + setState(496); match(NOT); } } - setState(497); - ((PredicateContext)_localctx).kind = match(IN); - setState(498); - match(T__0); setState(499); + 
((PredicateContext)_localctx).kind = match(IN); + setState(500); + match(T__0); + setState(501); expression(); - setState(504); + setState(506); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(500); + setState(502); match(T__2); - setState(501); + setState(503); expression(); } } - setState(506); + setState(508); _errHandler.sync(this); _la = _input.LA(1); } - setState(507); + setState(509); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(510); + setState(512); _la = _input.LA(1); if (_la==NOT) { { - setState(509); + setState(511); match(NOT); } } - setState(512); - ((PredicateContext)_localctx).kind = match(IN); - setState(513); - match(T__0); setState(514); - query(); + ((PredicateContext)_localctx).kind = match(IN); setState(515); + match(T__0); + setState(516); + query(); + setState(517); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(518); + setState(520); _la = _input.LA(1); if (_la==NOT) { { - setState(517); + setState(519); match(NOT); } } - setState(520); + setState(522); ((PredicateContext)_localctx).kind = match(LIKE); - setState(521); + setState(523); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(523); + setState(525); _la = _input.LA(1); if (_la==NOT) { { - setState(522); + setState(524); match(NOT); } } - setState(525); + setState(527); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(526); + setState(528); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(527); - match(IS); setState(529); + match(IS); + setState(531); _la = _input.LA(1); if (_la==NOT) { { - setState(528); + setState(530); match(NOT); } } - setState(531); + setState(533); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3333,14 +3334,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(534); - ((PatternContext)_localctx).value = string(); setState(536); + 
((PatternContext)_localctx).value = string(); + setState(538); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { case 1: { - setState(535); + setState(537); patternEscape(); } break; @@ -3388,25 +3389,25 @@ class SqlBaseParser extends Parser { PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); enterRule(_localctx, 52, RULE_patternEscape); try { - setState(544); + setState(546); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); { - setState(538); + setState(540); match(ESCAPE); - setState(539); + setState(541); ((PatternEscapeContext)_localctx).escape = string(); } break; case ESCAPE_ESC: enterOuterAlt(_localctx, 2); { - setState(540); - match(ESCAPE_ESC); - setState(541); - ((PatternEscapeContext)_localctx).escape = string(); setState(542); + match(ESCAPE_ESC); + setState(543); + ((PatternEscapeContext)_localctx).escape = string(); + setState(544); match(ESC_END); } break; @@ -3551,7 +3552,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(550); + setState(552); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3567,12 +3568,14 @@ class SqlBaseParser extends Parser { case FORMAT: case FUNCTIONS: case GRAPHVIZ: + case LEFT: case MAPPED: case NULL: case OPTIMIZED: case PARSED: case PHYSICAL: case PLAN: + case RIGHT: case RLIKE: case QUERY: case SCHEMAS: @@ -3603,7 +3606,7 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(547); + setState(549); primaryExpression(); } break; @@ -3613,7 +3616,7 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(548); + setState(550); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3621,7 +3624,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(549); + setState(551); valueExpression(4); } 
break; @@ -3629,7 +3632,7 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(564); + setState(566); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,80,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3637,7 +3640,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(562); + setState(564); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,79,_ctx) ) { case 1: @@ -3645,9 +3648,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(552); + setState(554); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(553); + setState(555); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 88)) & ~0x3f) == 0 && ((1L << (_la - 88)) & ((1L << (ASTERISK - 88)) | (1L << (SLASH - 88)) | (1L << (PERCENT - 88)))) != 0)) ) { @@ -3655,7 +3658,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(554); + setState(556); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3664,9 +3667,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(555); + setState(557); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(556); + setState(558); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ 
-3674,7 +3677,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(557); + setState(559); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -3683,18 +3686,18 @@ class SqlBaseParser extends Parser { _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(558); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(559); - comparisonOperator(); setState(560); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(561); + comparisonOperator(); + setState(562); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(566); + setState(568); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,80,_ctx); } @@ -3901,14 +3904,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 56, RULE_primaryExpression); int _la; try { - setState(588); + setState(590); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,82,_ctx) ) { case 1: _localctx = new CastContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(567); + setState(569); castExpression(); } break; @@ -3916,7 +3919,7 @@ class SqlBaseParser extends Parser { _localctx = new ExtractContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(568); + setState(570); extractExpression(); } break; @@ -3924,7 +3927,7 @@ class SqlBaseParser extends Parser { _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(569); + setState(571); constant(); } break; @@ -3932,7 +3935,7 @@ class SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(570); + setState(572); match(ASTERISK); } break; @@ -3940,18 +3943,18 @@ class SqlBaseParser extends Parser { _localctx 
= new StarContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(574); + setState(576); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(571); + setState(573); qualifiedName(); - setState(572); + setState(574); match(DOT); } } - setState(576); + setState(578); match(ASTERISK); } break; @@ -3959,7 +3962,7 @@ class SqlBaseParser extends Parser { _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(577); + setState(579); functionExpression(); } break; @@ -3967,11 +3970,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryExpressionContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(578); - match(T__0); - setState(579); - query(); setState(580); + match(T__0); + setState(581); + query(); + setState(582); match(T__1); } break; @@ -3979,7 +3982,7 @@ class SqlBaseParser extends Parser { _localctx = new ColumnReferenceContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(582); + setState(584); identifier(); } break; @@ -3987,7 +3990,7 @@ class SqlBaseParser extends Parser { _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(583); + setState(585); qualifiedName(); } break; @@ -3995,11 +3998,11 @@ class SqlBaseParser extends Parser { _localctx = new 
ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(584); - match(T__0); - setState(585); - expression(); setState(586); + match(T__0); + setState(587); + expression(); + setState(588); match(T__1); } break; @@ -4045,23 +4048,23 @@ class SqlBaseParser extends Parser { CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 58, RULE_castExpression); try { - setState(595); + setState(597); switch (_input.LA(1)) { case CAST: enterOuterAlt(_localctx, 1); { - setState(590); + setState(592); castTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(591); - match(FUNCTION_ESC); - setState(592); - castTemplate(); setState(593); + match(FUNCTION_ESC); + setState(594); + castTemplate(); + setState(595); match(ESC_END); } break; @@ -4114,17 +4117,17 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(597); - match(CAST); - setState(598); - match(T__0); setState(599); - expression(); + match(CAST); setState(600); - match(AS); + match(T__0); setState(601); - dataType(); + expression(); setState(602); + match(AS); + setState(603); + dataType(); + setState(604); match(T__1); } } @@ -4168,23 +4171,23 @@ class SqlBaseParser extends Parser { ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); enterRule(_localctx, 62, RULE_extractExpression); try { - setState(609); + setState(611); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(604); + setState(606); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(605); - match(FUNCTION_ESC); - setState(606); - extractTemplate(); setState(607); + match(FUNCTION_ESC); + setState(608); + extractTemplate(); + setState(609); match(ESC_END); } break; @@ -4238,17 +4241,17 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(611); - match(EXTRACT); - setState(612); - 
match(T__0); setState(613); - ((ExtractTemplateContext)_localctx).field = identifier(); + match(EXTRACT); setState(614); - match(FROM); + match(T__0); setState(615); - valueExpression(0); + ((ExtractTemplateContext)_localctx).field = identifier(); setState(616); + match(FROM); + setState(617); + valueExpression(0); + setState(618); match(T__1); } } @@ -4291,7 +4294,7 @@ class SqlBaseParser extends Parser { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); enterRule(_localctx, 66, RULE_functionExpression); try { - setState(623); + setState(625); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4303,11 +4306,13 @@ class SqlBaseParser extends Parser { case FORMAT: case FUNCTIONS: case GRAPHVIZ: + case LEFT: case MAPPED: case OPTIMIZED: case PARSED: case PHYSICAL: case PLAN: + case RIGHT: case RLIKE: case QUERY: case SCHEMAS: @@ -4324,18 +4329,18 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(618); + setState(620); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(619); - match(FUNCTION_ESC); - setState(620); - functionTemplate(); setState(621); + match(FUNCTION_ESC); + setState(622); + functionTemplate(); + setState(623); match(ESC_END); } break; @@ -4355,8 +4360,8 @@ class SqlBaseParser extends Parser { } public static class FunctionTemplateContext extends ParserRuleContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); + public FunctionNameContext functionName() { + return getRuleContext(FunctionNameContext.class,0); } public List expression() { return getRuleContexts(ExpressionContext.class); @@ -4393,45 +4398,45 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(625); - identifier(); - setState(626); + setState(627); + functionName(); + setState(628); match(T__0); - setState(638); + setState(640); _la = _input.LA(1); - if ((((_la) & 
~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << 
(FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(628); + setState(630); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(627); + setState(629); setQuantifier(); } } - setState(630); + setState(632); expression(); - setState(635); + setState(637); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(631); + setState(633); match(T__2); - setState(632); + setState(634); expression(); } } - setState(637); + setState(639); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(640); + setState(642); match(T__1); } } @@ -4446,6 +4451,101 @@ class SqlBaseParser extends Parser { return _localctx; } + public static class FunctionNameContext extends ParserRuleContext { + public TerminalNode LEFT() { return getToken(SqlBaseParser.LEFT, 0); } + public TerminalNode RIGHT() { return getToken(SqlBaseParser.RIGHT, 0); } + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + public FunctionNameContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_functionName; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionName(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionName(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) 
return ((SqlBaseVisitor)visitor).visitFunctionName(this); + else return visitor.visitChildren(this); + } + } + + public final FunctionNameContext functionName() throws RecognitionException { + FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); + enterRule(_localctx, 70, RULE_functionName); + try { + setState(647); + switch (_input.LA(1)) { + case LEFT: + enterOuterAlt(_localctx, 1); + { + setState(644); + match(LEFT); + } + break; + case RIGHT: + enterOuterAlt(_localctx, 2); + { + setState(645); + match(RIGHT); + } + break; + case ANALYZE: + case ANALYZED: + case CATALOGS: + case COLUMNS: + case DEBUG: + case EXECUTABLE: + case EXPLAIN: + case FORMAT: + case FUNCTIONS: + case GRAPHVIZ: + case MAPPED: + case OPTIMIZED: + case PARSED: + case PHYSICAL: + case PLAN: + case RLIKE: + case QUERY: + case SCHEMAS: + case SHOW: + case SYS: + case TABLES: + case TEXT: + case TYPE: + case TYPES: + case VERIFY: + case IDENTIFIER: + case DIGIT_IDENTIFIER: + case QUOTED_IDENTIFIER: + case BACKQUOTED_IDENTIFIER: + enterOuterAlt(_localctx, 3); + { + setState(646); + identifier(); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class ConstantContext extends ParserRuleContext { public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -4636,16 +4736,16 @@ class SqlBaseParser extends Parser { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_constant); + enterRule(_localctx, 72, RULE_constant); try { int _alt; - setState(667); + setState(674); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(642); + 
setState(649); match(NULL); } break; @@ -4654,7 +4754,7 @@ class SqlBaseParser extends Parser { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(643); + setState(650); number(); } break; @@ -4663,7 +4763,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(644); + setState(651); booleanValue(); } break; @@ -4671,7 +4771,7 @@ class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(646); + setState(653); _errHandler.sync(this); _alt = 1; do { @@ -4679,7 +4779,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(645); + setState(652); match(STRING); } } @@ -4687,9 +4787,9 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(648); + setState(655); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,89,_ctx); + _alt = getInterpreter().adaptivePredict(_input,90,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -4697,7 +4797,7 @@ class SqlBaseParser extends Parser { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(650); + setState(657); match(PARAM); } break; @@ -4705,11 +4805,11 @@ class SqlBaseParser extends Parser { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(651); + setState(658); match(DATE_ESC); - setState(652); + setState(659); string(); - setState(653); + setState(660); match(ESC_END); } break; @@ -4717,11 +4817,11 @@ class SqlBaseParser extends Parser { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(655); + setState(662); match(TIME_ESC); - setState(656); + setState(663); string(); - setState(657); + setState(664); match(ESC_END); } break; @@ -4729,11 +4829,11 @@ class SqlBaseParser extends Parser { 
_localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(659); + setState(666); match(TIMESTAMP_ESC); - setState(660); + setState(667); string(); - setState(661); + setState(668); match(ESC_END); } break; @@ -4741,11 +4841,11 @@ class SqlBaseParser extends Parser { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(663); + setState(670); match(GUID_ESC); - setState(664); + setState(671); string(); - setState(665); + setState(672); match(ESC_END); } break; @@ -4792,12 +4892,12 @@ class SqlBaseParser extends Parser { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_comparisonOperator); + enterRule(_localctx, 74, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(669); + setState(676); _la = _input.LA(1); if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (EQ - 80)) | (1L << (NEQ - 80)) | (1L << (LT - 80)) | (1L << (LTE - 80)) | (1L << (GT - 80)) | (1L << (GTE - 80)))) != 0)) ) { _errHandler.recoverInline(this); @@ -4841,12 +4941,12 @@ class SqlBaseParser extends Parser { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_booleanValue); + enterRule(_localctx, 76, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(671); + setState(678); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4899,12 +4999,12 @@ class SqlBaseParser extends Parser { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_dataType); + enterRule(_localctx, 78, RULE_dataType); try { _localctx 
= new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(673); + setState(680); identifier(); } } @@ -4951,30 +5051,30 @@ class SqlBaseParser extends Parser { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_qualifiedName); + enterRule(_localctx, 80, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(680); + setState(687); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,91,_ctx); + _alt = getInterpreter().adaptivePredict(_input,92,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(675); + setState(682); identifier(); - setState(676); + setState(683); match(DOT); } } } - setState(682); + setState(689); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,91,_ctx); + _alt = getInterpreter().adaptivePredict(_input,92,_ctx); } - setState(683); + setState(690); identifier(); } } @@ -5017,15 +5117,15 @@ class SqlBaseParser extends Parser { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_identifier); + enterRule(_localctx, 82, RULE_identifier); try { - setState(687); + setState(694); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(685); + setState(692); quoteIdentifier(); } break; @@ -5058,7 +5158,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(686); + setState(693); unquoteIdentifier(); } break; @@ -5108,46 +5208,46 @@ class SqlBaseParser extends Parser { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); 
- enterRule(_localctx, 82, RULE_tableIdentifier); + enterRule(_localctx, 84, RULE_tableIdentifier); int _la; try { - setState(701); + setState(708); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,96,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(692); + setState(699); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(689); + setState(696); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(690); + setState(697); match(T__3); } } - setState(694); + setState(701); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(698); + setState(705); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,94,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { case 1: { - setState(695); + setState(702); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(696); + setState(703); match(T__3); } break; } - setState(700); + setState(707); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -5212,15 +5312,15 @@ class SqlBaseParser extends Parser { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new 
QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_quoteIdentifier); + enterRule(_localctx, 86, RULE_quoteIdentifier); try { - setState(705); + setState(712); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(703); + setState(710); match(QUOTED_IDENTIFIER); } break; @@ -5228,7 +5328,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(704); + setState(711); match(BACKQUOTED_IDENTIFIER); } break; @@ -5298,15 +5398,15 @@ class SqlBaseParser extends Parser { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_unquoteIdentifier); + enterRule(_localctx, 88, RULE_unquoteIdentifier); try { - setState(710); + setState(717); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(707); + setState(714); match(IDENTIFIER); } break; @@ -5338,7 +5438,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(708); + setState(715); nonReserved(); } break; @@ -5346,7 +5446,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(709); + setState(716); match(DIGIT_IDENTIFIER); } break; @@ -5413,15 +5513,15 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_number); + enterRule(_localctx, 90, RULE_number); try { - setState(714); + setState(721); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); 
enterOuterAlt(_localctx, 1); { - setState(712); + setState(719); match(DECIMAL_VALUE); } break; @@ -5429,7 +5529,7 @@ class SqlBaseParser extends Parser { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(713); + setState(720); match(INTEGER_VALUE); } break; @@ -5472,12 +5572,12 @@ class SqlBaseParser extends Parser { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_string); + enterRule(_localctx, 92, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(716); + setState(723); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -5544,12 +5644,12 @@ class SqlBaseParser extends Parser { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_nonReserved); + enterRule(_localctx, 94, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(718); + setState(725); _la = _input.LA(1); if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5600,291 +5700,295 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - 
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d3\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02da\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ - ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\3\4\7\4p\n\4\f\4\16\4s\13\4\3\4\5\4v\n\4\3\4\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\7\4\177\n\4\f\4\16\4\u0082\13\4\3\4\5\4\u0085\n\4\3"+ - "\4\3\4\3\4\3\4\5\4\u008b\n\4\3\4\5\4\u008e\n\4\3\4\3\4\3\4\3\4\3\4\3\4"+ - "\3\4\3\4\3\4\5\4\u0099\n\4\3\4\5\4\u009c\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+ - "\4\3\4\5\4\u00a6\n\4\3\4\5\4\u00a9\n\4\3\4\5\4\u00ac\n\4\3\4\5\4\u00af"+ - "\n\4\3\4\3\4\3\4\3\4\7\4\u00b5\n\4\f\4\16\4\u00b8\13\4\5\4\u00ba\n\4\3"+ - "\4\3\4\3\4\3\4\5\4\u00c0\n\4\3\4\3\4\5\4\u00c4\n\4\3\4\5\4\u00c7\n\4\3"+ - "\4\5\4\u00ca\n\4\3\4\5\4\u00cd\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00d4\n\4\3"+ - "\5\3\5\3\5\3\5\7\5\u00da\n\5\f\5\16\5\u00dd\13\5\5\5\u00df\n\5\3\5\3\5"+ - "\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00e9\n\6\f\6\16\6\u00ec\13\6\5\6\u00ee\n"+ - "\6\3\6\5\6\u00f1\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00f8\n\7\3\b\3\b\3\b\3\b"+ - "\3\b\5\b\u00ff\n\b\3\t\3\t\5\t\u0103\n\t\3\n\3\n\5\n\u0107\n\n\3\n\3\n"+ - "\3\n\7\n\u010c\n\n\f\n\16\n\u010f\13\n\3\n\5\n\u0112\n\n\3\n\3\n\5\n\u0116"+ - "\n\n\3\n\3\n\3\n\5\n\u011b\n\n\3\n\3\n\5\n\u011f\n\n\3\13\3\13\3\13\3"+ - "\13\7\13\u0125\n\13\f\13\16\13\u0128\13\13\3\f\5\f\u012b\n\f\3\f\3\f\3"+ - "\f\7\f\u0130\n\f\f\f\16\f\u0133\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ - "\u013b\n\16\f\16\16\16\u013e\13\16\5\16\u0140\n\16\3\16\3\16\5\16\u0144"+ - "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0150\n\21"+ - 
"\3\21\5\21\u0153\n\21\3\22\3\22\7\22\u0157\n\22\f\22\16\22\u015a\13\22"+ - "\3\23\3\23\3\23\3\23\5\23\u0160\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u0167"+ - "\n\23\3\24\5\24\u016a\n\24\3\24\3\24\5\24\u016e\n\24\3\24\3\24\5\24\u0172"+ - "\n\24\3\24\3\24\5\24\u0176\n\24\5\24\u0178\n\24\3\25\3\25\3\25\3\25\3"+ - "\25\3\25\3\25\7\25\u0181\n\25\f\25\16\25\u0184\13\25\3\25\3\25\5\25\u0188"+ - "\n\25\3\26\3\26\5\26\u018c\n\26\3\26\5\26\u018f\n\26\3\26\3\26\3\26\3"+ - "\26\5\26\u0195\n\26\3\26\5\26\u0198\n\26\3\26\3\26\3\26\3\26\5\26\u019e"+ - "\n\26\3\26\5\26\u01a1\n\26\5\26\u01a3\n\26\3\27\3\27\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01b4\n\30\f\30"+ - "\16\30\u01b7\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01c2"+ - "\n\30\f\30\16\30\u01c5\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\7\30\u01d0\n\30\f\30\16\30\u01d3\13\30\3\30\3\30\3\30\5\30\u01d8\n"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01e0\n\30\f\30\16\30\u01e3\13"+ - "\30\3\31\3\31\5\31\u01e7\n\31\3\32\5\32\u01ea\n\32\3\32\3\32\3\32\3\32"+ - "\3\32\3\32\5\32\u01f2\n\32\3\32\3\32\3\32\3\32\3\32\7\32\u01f9\n\32\f"+ - "\32\16\32\u01fc\13\32\3\32\3\32\3\32\5\32\u0201\n\32\3\32\3\32\3\32\3"+ - "\32\3\32\3\32\5\32\u0209\n\32\3\32\3\32\3\32\5\32\u020e\n\32\3\32\3\32"+ - "\3\32\3\32\5\32\u0214\n\32\3\32\5\32\u0217\n\32\3\33\3\33\5\33\u021b\n"+ - "\33\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0223\n\34\3\35\3\35\3\35\3\35"+ - "\5\35\u0229\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\7\35"+ - "\u0235\n\35\f\35\16\35\u0238\13\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ - "\5\36\u0241\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ - "\3\36\5\36\u024f\n\36\3\37\3\37\3\37\3\37\3\37\5\37\u0256\n\37\3 \3 \3"+ - " \3 \3 \3 \3 \3!\3!\3!\3!\3!\5!\u0264\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3"+ - "#\3#\3#\3#\3#\5#\u0272\n#\3$\3$\3$\5$\u0277\n$\3$\3$\3$\7$\u027c\n$\f"+ - "$\16$\u027f\13$\5$\u0281\n$\3$\3$\3%\3%\3%\3%\6%\u0289\n%\r%\16%\u028a"+ - 
"\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\5%\u029e\n%\3&\3&"+ - "\3\'\3\'\3(\3(\3)\3)\3)\7)\u02a9\n)\f)\16)\u02ac\13)\3)\3)\3*\3*\5*\u02b2"+ - "\n*\3+\3+\3+\5+\u02b7\n+\3+\3+\3+\3+\5+\u02bd\n+\3+\5+\u02c0\n+\3,\3,"+ - "\5,\u02c4\n,\3-\3-\3-\5-\u02c9\n-\3.\3.\5.\u02cd\n.\3/\3/\3\60\3\60\3"+ - "\60\2\4.8\61\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64"+ - "\668:<>@BDFHJLNPRTVXZ\\^\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\""+ - "BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7"+ - "\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33"+ - "\36\36!\",,\62\62\668:<>?ABDEGG\u032e\2`\3\2\2\2\4c\3\2\2\2\6\u00d3\3"+ - "\2\2\2\b\u00de\3\2\2\2\n\u00e2\3\2\2\2\f\u00f7\3\2\2\2\16\u00fe\3\2\2"+ - "\2\20\u0100\3\2\2\2\22\u0104\3\2\2\2\24\u0120\3\2\2\2\26\u012a\3\2\2\2"+ - "\30\u0134\3\2\2\2\32\u0143\3\2\2\2\34\u0145\3\2\2\2\36\u014b\3\2\2\2 "+ - "\u014d\3\2\2\2\"\u0154\3\2\2\2$\u0166\3\2\2\2&\u0177\3\2\2\2(\u0187\3"+ - "\2\2\2*\u01a2\3\2\2\2,\u01a4\3\2\2\2.\u01d7\3\2\2\2\60\u01e4\3\2\2\2\62"+ - "\u0216\3\2\2\2\64\u0218\3\2\2\2\66\u0222\3\2\2\28\u0228\3\2\2\2:\u024e"+ - "\3\2\2\2<\u0255\3\2\2\2>\u0257\3\2\2\2@\u0263\3\2\2\2B\u0265\3\2\2\2D"+ - "\u0271\3\2\2\2F\u0273\3\2\2\2H\u029d\3\2\2\2J\u029f\3\2\2\2L\u02a1\3\2"+ - "\2\2N\u02a3\3\2\2\2P\u02aa\3\2\2\2R\u02b1\3\2\2\2T\u02bf\3\2\2\2V\u02c3"+ - "\3\2\2\2X\u02c8\3\2\2\2Z\u02cc\3\2\2\2\\\u02ce\3\2\2\2^\u02d0\3\2\2\2"+ - "`a\5\6\4\2ab\7\2\2\3b\3\3\2\2\2cd\5,\27\2de\7\2\2\3e\5\3\2\2\2f\u00d4"+ - "\5\b\5\2gu\7\33\2\2hq\7\3\2\2ij\78\2\2jp\t\2\2\2kl\7\36\2\2lp\t\3\2\2"+ - "mn\7G\2\2np\5L\'\2oi\3\2\2\2ok\3\2\2\2om\3\2\2\2ps\3\2\2\2qo\3\2\2\2q"+ - "r\3\2\2\2rt\3\2\2\2sq\3\2\2\2tv\7\4\2\2uh\3\2\2\2uv\3\2\2\2vw\3\2\2\2"+ - "w\u00d4\5\6\4\2x\u0084\7\24\2\2y\u0080\7\3\2\2z{\78\2\2{\177\t\4\2\2|"+ - "}\7\36\2\2}\177\t\3\2\2~z\3\2\2\2~|\3\2\2\2\177\u0082\3\2\2\2\u0080~\3"+ - "\2\2\2\u0080\u0081\3\2\2\2\u0081\u0083\3\2\2\2\u0082\u0080\3\2\2\2\u0083"+ - 
"\u0085\7\4\2\2\u0084y\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u0086\3\2\2\2"+ - "\u0086\u00d4\5\6\4\2\u0087\u0088\7>\2\2\u0088\u008d\7A\2\2\u0089\u008b"+ - "\7*\2\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c"+ - "\u008e\5\64\33\2\u008d\u008a\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u00d4\3"+ - "\2\2\2\u008f\u0090\7>\2\2\u0090\u0091\7\23\2\2\u0091\u0092\t\5\2\2\u0092"+ - "\u00d4\5T+\2\u0093\u0094\t\6\2\2\u0094\u00d4\5T+\2\u0095\u0096\7>\2\2"+ - "\u0096\u009b\7!\2\2\u0097\u0099\7*\2\2\u0098\u0097\3\2\2\2\u0098\u0099"+ - "\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u009c\5\64\33\2\u009b\u0098\3\2\2\2"+ - "\u009b\u009c\3\2\2\2\u009c\u00d4\3\2\2\2\u009d\u009e\7>\2\2\u009e\u00d4"+ - "\7<\2\2\u009f\u00a0\7?\2\2\u00a0\u00d4\7\22\2\2\u00a1\u00a2\7?\2\2\u00a2"+ - "\u00a8\7A\2\2\u00a3\u00a5\7\21\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a4\3\2"+ - "\2\2\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00a9\5\64\33\2\u00a8"+ - "\u00a3\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00ae\3\2\2\2\u00aa\u00ac\7*"+ - "\2\2\u00ab\u00aa\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad"+ - "\u00af\5\64\33\2\u00ae\u00ab\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b9\3"+ - "\2\2\2\u00b0\u00b1\7D\2\2\u00b1\u00b6\5\\/\2\u00b2\u00b3\7\5\2\2\u00b3"+ - "\u00b5\5\\/\2\u00b4\u00b2\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6\u00b4\3\2"+ - "\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b9"+ - "\u00b0\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00d4\3\2\2\2\u00bb\u00bc\7?"+ - "\2\2\u00bc\u00bf\7\23\2\2\u00bd\u00be\7\21\2\2\u00be\u00c0\5\\/\2\u00bf"+ - "\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c6\3\2\2\2\u00c1\u00c3\7@"+ - "\2\2\u00c2\u00c4\7*\2\2\u00c3\u00c2\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4"+ - "\u00c5\3\2\2\2\u00c5\u00c7\5\64\33\2\u00c6\u00c1\3\2\2\2\u00c6\u00c7\3"+ - "\2\2\2\u00c7\u00cc\3\2\2\2\u00c8\u00ca\7*\2\2\u00c9\u00c8\3\2\2\2\u00c9"+ - "\u00ca\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00cd\5\64\33\2\u00cc\u00c9\3"+ - "\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d4\3\2\2\2\u00ce\u00cf\7?\2\2\u00cf"+ - 
"\u00d4\7E\2\2\u00d0\u00d1\7?\2\2\u00d1\u00d2\7@\2\2\u00d2\u00d4\7E\2\2"+ - "\u00d3f\3\2\2\2\u00d3g\3\2\2\2\u00d3x\3\2\2\2\u00d3\u0087\3\2\2\2\u00d3"+ - "\u008f\3\2\2\2\u00d3\u0093\3\2\2\2\u00d3\u0095\3\2\2\2\u00d3\u009d\3\2"+ - "\2\2\u00d3\u009f\3\2\2\2\u00d3\u00a1\3\2\2\2\u00d3\u00bb\3\2\2\2\u00d3"+ - "\u00ce\3\2\2\2\u00d3\u00d0\3\2\2\2\u00d4\7\3\2\2\2\u00d5\u00d6\7I\2\2"+ - "\u00d6\u00db\5\34\17\2\u00d7\u00d8\7\5\2\2\u00d8\u00da\5\34\17\2\u00d9"+ - "\u00d7\3\2\2\2\u00da\u00dd\3\2\2\2\u00db\u00d9\3\2\2\2\u00db\u00dc\3\2"+ - "\2\2\u00dc\u00df\3\2\2\2\u00dd\u00db\3\2\2\2\u00de\u00d5\3\2\2\2\u00de"+ - "\u00df\3\2\2\2\u00df\u00e0\3\2\2\2\u00e0\u00e1\5\n\6\2\u00e1\t\3\2\2\2"+ - "\u00e2\u00ed\5\16\b\2\u00e3\u00e4\7\64\2\2\u00e4\u00e5\7\17\2\2\u00e5"+ - "\u00ea\5\20\t\2\u00e6\u00e7\7\5\2\2\u00e7\u00e9\5\20\t\2\u00e8\u00e6\3"+ - "\2\2\2\u00e9\u00ec\3\2\2\2\u00ea\u00e8\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb"+ - "\u00ee\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ed\u00e3\3\2\2\2\u00ed\u00ee\3\2"+ - "\2\2\u00ee\u00f0\3\2\2\2\u00ef\u00f1\5\f\7\2\u00f0\u00ef\3\2\2\2\u00f0"+ - "\u00f1\3\2\2\2\u00f1\13\3\2\2\2\u00f2\u00f3\7+\2\2\u00f3\u00f8\t\7\2\2"+ - "\u00f4\u00f5\7L\2\2\u00f5\u00f6\t\7\2\2\u00f6\u00f8\7Q\2\2\u00f7\u00f2"+ - "\3\2\2\2\u00f7\u00f4\3\2\2\2\u00f8\r\3\2\2\2\u00f9\u00ff\5\22\n\2\u00fa"+ - "\u00fb\7\3\2\2\u00fb\u00fc\5\n\6\2\u00fc\u00fd\7\4\2\2\u00fd\u00ff\3\2"+ - "\2\2\u00fe\u00f9\3\2\2\2\u00fe\u00fa\3\2\2\2\u00ff\17\3\2\2\2\u0100\u0102"+ - "\5,\27\2\u0101\u0103\t\b\2\2\u0102\u0101\3\2\2\2\u0102\u0103\3\2\2\2\u0103"+ - "\21\3\2\2\2\u0104\u0106\7=\2\2\u0105\u0107\5\36\20\2\u0106\u0105\3\2\2"+ - "\2\u0106\u0107\3\2\2\2\u0107\u0108\3\2\2\2\u0108\u010d\5 \21\2\u0109\u010a"+ - "\7\5\2\2\u010a\u010c\5 \21\2\u010b\u0109\3\2\2\2\u010c\u010f\3\2\2\2\u010d"+ - "\u010b\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u0111\3\2\2\2\u010f\u010d\3\2"+ - "\2\2\u0110\u0112\5\24\13\2\u0111\u0110\3\2\2\2\u0111\u0112\3\2\2\2\u0112"+ - "\u0115\3\2\2\2\u0113\u0114\7H\2\2\u0114\u0116\5.\30\2\u0115\u0113\3\2"+ - 
"\2\2\u0115\u0116\3\2\2\2\u0116\u011a\3\2\2\2\u0117\u0118\7#\2\2\u0118"+ - "\u0119\7\17\2\2\u0119\u011b\5\26\f\2\u011a\u0117\3\2\2\2\u011a\u011b\3"+ - "\2\2\2\u011b\u011e\3\2\2\2\u011c\u011d\7$\2\2\u011d\u011f\5.\30\2\u011e"+ - "\u011c\3\2\2\2\u011e\u011f\3\2\2\2\u011f\23\3\2\2\2\u0120\u0121\7\37\2"+ - "\2\u0121\u0126\5\"\22\2\u0122\u0123\7\5\2\2\u0123\u0125\5\"\22\2\u0124"+ - "\u0122\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2"+ - "\2\2\u0127\25\3\2\2\2\u0128\u0126\3\2\2\2\u0129\u012b\5\36\20\2\u012a"+ - "\u0129\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0131\5\30"+ - "\r\2\u012d\u012e\7\5\2\2\u012e\u0130\5\30\r\2\u012f\u012d\3\2\2\2\u0130"+ - "\u0133\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\27\3\2\2"+ - "\2\u0133\u0131\3\2\2\2\u0134\u0135\5\32\16\2\u0135\31\3\2\2\2\u0136\u013f"+ - "\7\3\2\2\u0137\u013c\5,\27\2\u0138\u0139\7\5\2\2\u0139\u013b\5,\27\2\u013a"+ - "\u0138\3\2\2\2\u013b\u013e\3\2\2\2\u013c\u013a\3\2\2\2\u013c\u013d\3\2"+ - "\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013f\u0137\3\2\2\2\u013f"+ - "\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0144\7\4\2\2\u0142\u0144\5,"+ - "\27\2\u0143\u0136\3\2\2\2\u0143\u0142\3\2\2\2\u0144\33\3\2\2\2\u0145\u0146"+ - "\5R*\2\u0146\u0147\7\f\2\2\u0147\u0148\7\3\2\2\u0148\u0149\5\n\6\2\u0149"+ - "\u014a\7\4\2\2\u014a\35\3\2\2\2\u014b\u014c\t\t\2\2\u014c\37\3\2\2\2\u014d"+ - "\u0152\5,\27\2\u014e\u0150\7\f\2\2\u014f\u014e\3\2\2\2\u014f\u0150\3\2"+ - "\2\2\u0150\u0151\3\2\2\2\u0151\u0153\5R*\2\u0152\u014f\3\2\2\2\u0152\u0153"+ - "\3\2\2\2\u0153!\3\2\2\2\u0154\u0158\5*\26\2\u0155\u0157\5$\23\2\u0156"+ - "\u0155\3\2\2\2\u0157\u015a\3\2\2\2\u0158\u0156\3\2\2\2\u0158\u0159\3\2"+ - "\2\2\u0159#\3\2\2\2\u015a\u0158\3\2\2\2\u015b\u015c\5&\24\2\u015c\u015d"+ - "\7(\2\2\u015d\u015f\5*\26\2\u015e\u0160\5(\25\2\u015f\u015e\3\2\2\2\u015f"+ - "\u0160\3\2\2\2\u0160\u0167\3\2\2\2\u0161\u0162\7.\2\2\u0162\u0163\5&\24"+ - 
"\2\u0163\u0164\7(\2\2\u0164\u0165\5*\26\2\u0165\u0167\3\2\2\2\u0166\u015b"+ - "\3\2\2\2\u0166\u0161\3\2\2\2\u0167%\3\2\2\2\u0168\u016a\7&\2\2\u0169\u0168"+ - "\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u0178\3\2\2\2\u016b\u016d\7)\2\2\u016c"+ - "\u016e\7\65\2\2\u016d\u016c\3\2\2\2\u016d\u016e\3\2\2\2\u016e\u0178\3"+ - "\2\2\2\u016f\u0171\79\2\2\u0170\u0172\7\65\2\2\u0171\u0170\3\2\2\2\u0171"+ - "\u0172\3\2\2\2\u0172\u0178\3\2\2\2\u0173\u0175\7 \2\2\u0174\u0176\7\65"+ - "\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0178\3\2\2\2\u0177"+ - "\u0169\3\2\2\2\u0177\u016b\3\2\2\2\u0177\u016f\3\2\2\2\u0177\u0173\3\2"+ - "\2\2\u0178\'\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u0188\5.\30\2\u017b\u017c"+ - "\7F\2\2\u017c\u017d\7\3\2\2\u017d\u0182\5R*\2\u017e\u017f\7\5\2\2\u017f"+ - "\u0181\5R*\2\u0180\u017e\3\2\2\2\u0181\u0184\3\2\2\2\u0182\u0180\3\2\2"+ - "\2\u0182\u0183\3\2\2\2\u0183\u0185\3\2\2\2\u0184\u0182\3\2\2\2\u0185\u0186"+ - "\7\4\2\2\u0186\u0188\3\2\2\2\u0187\u0179\3\2\2\2\u0187\u017b\3\2\2\2\u0188"+ - ")\3\2\2\2\u0189\u018e\5T+\2\u018a\u018c\7\f\2\2\u018b\u018a\3\2\2\2\u018b"+ - "\u018c\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018f\5P)\2\u018e\u018b\3\2\2"+ - "\2\u018e\u018f\3\2\2\2\u018f\u01a3\3\2\2\2\u0190\u0191\7\3\2\2\u0191\u0192"+ - "\5\n\6\2\u0192\u0197\7\4\2\2\u0193\u0195\7\f\2\2\u0194\u0193\3\2\2\2\u0194"+ - "\u0195\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0198\5P)\2\u0197\u0194\3\2\2"+ - "\2\u0197\u0198\3\2\2\2\u0198\u01a3\3\2\2\2\u0199\u019a\7\3\2\2\u019a\u019b"+ - "\5\"\22\2\u019b\u01a0\7\4\2\2\u019c\u019e\7\f\2\2\u019d\u019c\3\2\2\2"+ - "\u019d\u019e\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a1\5P)\2\u01a0\u019d"+ - "\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a3\3\2\2\2\u01a2\u0189\3\2\2\2\u01a2"+ - "\u0190\3\2\2\2\u01a2\u0199\3\2\2\2\u01a3+\3\2\2\2\u01a4\u01a5\5.\30\2"+ - "\u01a5-\3\2\2\2\u01a6\u01a7\b\30\1\2\u01a7\u01a8\7/\2\2\u01a8\u01d8\5"+ - ".\30\n\u01a9\u01aa\7\32\2\2\u01aa\u01ab\7\3\2\2\u01ab\u01ac\5\b\5\2\u01ac"+ - 
"\u01ad\7\4\2\2\u01ad\u01d8\3\2\2\2\u01ae\u01af\7;\2\2\u01af\u01b0\7\3"+ - "\2\2\u01b0\u01b5\5\\/\2\u01b1\u01b2\7\5\2\2\u01b2\u01b4\5\\/\2\u01b3\u01b1"+ - "\3\2\2\2\u01b4\u01b7\3\2\2\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+ - "\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01b9\7\4\2\2\u01b9\u01d8\3\2"+ - "\2\2\u01ba\u01bb\7-\2\2\u01bb\u01bc\7\3\2\2\u01bc\u01bd\5P)\2\u01bd\u01be"+ - "\7\5\2\2\u01be\u01c3\5\\/\2\u01bf\u01c0\7\5\2\2\u01c0\u01c2\5\\/\2\u01c1"+ - "\u01bf\3\2\2\2\u01c2\u01c5\3\2\2\2\u01c3\u01c1\3\2\2\2\u01c3\u01c4\3\2"+ - "\2\2\u01c4\u01c6\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c6\u01c7\7\4\2\2\u01c7"+ - "\u01d8\3\2\2\2\u01c8\u01c9\7-\2\2\u01c9\u01ca\7\3\2\2\u01ca\u01cb\5\\"+ - "/\2\u01cb\u01cc\7\5\2\2\u01cc\u01d1\5\\/\2\u01cd\u01ce\7\5\2\2\u01ce\u01d0"+ - "\5\\/\2\u01cf\u01cd\3\2\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d1"+ - "\u01d2\3\2\2\2\u01d2\u01d4\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d4\u01d5\7\4"+ - "\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d8\5\60\31\2\u01d7\u01a6\3\2\2\2\u01d7"+ - "\u01a9\3\2\2\2\u01d7\u01ae\3\2\2\2\u01d7\u01ba\3\2\2\2\u01d7\u01c8\3\2"+ - "\2\2\u01d7\u01d6\3\2\2\2\u01d8\u01e1\3\2\2\2\u01d9\u01da\f\4\2\2\u01da"+ - "\u01db\7\n\2\2\u01db\u01e0\5.\30\5\u01dc\u01dd\f\3\2\2\u01dd\u01de\7\63"+ - "\2\2\u01de\u01e0\5.\30\4\u01df\u01d9\3\2\2\2\u01df\u01dc\3\2\2\2\u01e0"+ - "\u01e3\3\2\2\2\u01e1\u01df\3\2\2\2\u01e1\u01e2\3\2\2\2\u01e2/\3\2\2\2"+ - "\u01e3\u01e1\3\2\2\2\u01e4\u01e6\58\35\2\u01e5\u01e7\5\62\32\2\u01e6\u01e5"+ - "\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7\61\3\2\2\2\u01e8\u01ea\7/\2\2\u01e9"+ - "\u01e8\3\2\2\2\u01e9\u01ea\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\7\16"+ - "\2\2\u01ec\u01ed\58\35\2\u01ed\u01ee\7\n\2\2\u01ee\u01ef\58\35\2\u01ef"+ - "\u0217\3\2\2\2\u01f0\u01f2\7/\2\2\u01f1\u01f0\3\2\2\2\u01f1\u01f2\3\2"+ - "\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\7%\2\2\u01f4\u01f5\7\3\2\2\u01f5"+ - "\u01fa\5,\27\2\u01f6\u01f7\7\5\2\2\u01f7\u01f9\5,\27\2\u01f8\u01f6\3\2"+ - "\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb"+ - 
"\u01fd\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fd\u01fe\7\4\2\2\u01fe\u0217\3\2"+ - "\2\2\u01ff\u0201\7/\2\2\u0200\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201"+ - "\u0202\3\2\2\2\u0202\u0203\7%\2\2\u0203\u0204\7\3\2\2\u0204\u0205\5\b"+ - "\5\2\u0205\u0206\7\4\2\2\u0206\u0217\3\2\2\2\u0207\u0209\7/\2\2\u0208"+ - "\u0207\3\2\2\2\u0208\u0209\3\2\2\2\u0209\u020a\3\2\2\2\u020a\u020b\7*"+ - "\2\2\u020b\u0217\5\64\33\2\u020c\u020e\7/\2\2\u020d\u020c\3\2\2\2\u020d"+ - "\u020e\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\7:\2\2\u0210\u0217\5\\"+ - "/\2\u0211\u0213\7\'\2\2\u0212\u0214\7/\2\2\u0213\u0212\3\2\2\2\u0213\u0214"+ - "\3\2\2\2\u0214\u0215\3\2\2\2\u0215\u0217\7\60\2\2\u0216\u01e9\3\2\2\2"+ - "\u0216\u01f1\3\2\2\2\u0216\u0200\3\2\2\2\u0216\u0208\3\2\2\2\u0216\u020d"+ - "\3\2\2\2\u0216\u0211\3\2\2\2\u0217\63\3\2\2\2\u0218\u021a\5\\/\2\u0219"+ - "\u021b\5\66\34\2\u021a\u0219\3\2\2\2\u021a\u021b\3\2\2\2\u021b\65\3\2"+ - "\2\2\u021c\u021d\7\30\2\2\u021d\u0223\5\\/\2\u021e\u021f\7J\2\2\u021f"+ - "\u0220\5\\/\2\u0220\u0221\7Q\2\2\u0221\u0223\3\2\2\2\u0222\u021c\3\2\2"+ - "\2\u0222\u021e\3\2\2\2\u0223\67\3\2\2\2\u0224\u0225\b\35\1\2\u0225\u0229"+ - "\5:\36\2\u0226\u0227\t\n\2\2\u0227\u0229\58\35\6\u0228\u0224\3\2\2\2\u0228"+ - "\u0226\3\2\2\2\u0229\u0236\3\2\2\2\u022a\u022b\f\5\2\2\u022b\u022c\t\13"+ - "\2\2\u022c\u0235\58\35\6\u022d\u022e\f\4\2\2\u022e\u022f\t\n\2\2\u022f"+ - "\u0235\58\35\5\u0230\u0231\f\3\2\2\u0231\u0232\5J&\2\u0232\u0233\58\35"+ - "\4\u0233\u0235\3\2\2\2\u0234\u022a\3\2\2\2\u0234\u022d\3\2\2\2\u0234\u0230"+ - "\3\2\2\2\u0235\u0238\3\2\2\2\u0236\u0234\3\2\2\2\u0236\u0237\3\2\2\2\u0237"+ - "9\3\2\2\2\u0238\u0236\3\2\2\2\u0239\u024f\5<\37\2\u023a\u024f\5@!\2\u023b"+ - "\u024f\5H%\2\u023c\u024f\7Z\2\2\u023d\u023e\5P)\2\u023e\u023f\7^\2\2\u023f"+ - "\u0241\3\2\2\2\u0240\u023d\3\2\2\2\u0240\u0241\3\2\2\2\u0241\u0242\3\2"+ - "\2\2\u0242\u024f\7Z\2\2\u0243\u024f\5D#\2\u0244\u0245\7\3\2\2\u0245\u0246"+ - "\5\b\5\2\u0246\u0247\7\4\2\2\u0247\u024f\3\2\2\2\u0248\u024f\5R*\2\u0249"+ 
- "\u024f\5P)\2\u024a\u024b\7\3\2\2\u024b\u024c\5,\27\2\u024c\u024d\7\4\2"+ - "\2\u024d\u024f\3\2\2\2\u024e\u0239\3\2\2\2\u024e\u023a\3\2\2\2\u024e\u023b"+ - "\3\2\2\2\u024e\u023c\3\2\2\2\u024e\u0240\3\2\2\2\u024e\u0243\3\2\2\2\u024e"+ - "\u0244\3\2\2\2\u024e\u0248\3\2\2\2\u024e\u0249\3\2\2\2\u024e\u024a\3\2"+ - "\2\2\u024f;\3\2\2\2\u0250\u0256\5> \2\u0251\u0252\7K\2\2\u0252\u0253\5"+ - "> \2\u0253\u0254\7Q\2\2\u0254\u0256\3\2\2\2\u0255\u0250\3\2\2\2\u0255"+ - "\u0251\3\2\2\2\u0256=\3\2\2\2\u0257\u0258\7\20\2\2\u0258\u0259\7\3\2\2"+ - "\u0259\u025a\5,\27\2\u025a\u025b\7\f\2\2\u025b\u025c\5N(\2\u025c\u025d"+ - "\7\4\2\2\u025d?\3\2\2\2\u025e\u0264\5B\"\2\u025f\u0260\7K\2\2\u0260\u0261"+ - "\5B\"\2\u0261\u0262\7Q\2\2\u0262\u0264\3\2\2\2\u0263\u025e\3\2\2\2\u0263"+ - "\u025f\3\2\2\2\u0264A\3\2\2\2\u0265\u0266\7\34\2\2\u0266\u0267\7\3\2\2"+ - "\u0267\u0268\5R*\2\u0268\u0269\7\37\2\2\u0269\u026a\58\35\2\u026a\u026b"+ - "\7\4\2\2\u026bC\3\2\2\2\u026c\u0272\5F$\2\u026d\u026e\7K\2\2\u026e\u026f"+ - "\5F$\2\u026f\u0270\7Q\2\2\u0270\u0272\3\2\2\2\u0271\u026c\3\2\2\2\u0271"+ - "\u026d\3\2\2\2\u0272E\3\2\2\2\u0273\u0274\5R*\2\u0274\u0280\7\3\2\2\u0275"+ - "\u0277\5\36\20\2\u0276\u0275\3\2\2\2\u0276\u0277\3\2\2\2\u0277\u0278\3"+ - "\2\2\2\u0278\u027d\5,\27\2\u0279\u027a\7\5\2\2\u027a\u027c\5,\27\2\u027b"+ - "\u0279\3\2\2\2\u027c\u027f\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027e\3\2"+ - "\2\2\u027e\u0281\3\2\2\2\u027f\u027d\3\2\2\2\u0280\u0276\3\2\2\2\u0280"+ - "\u0281\3\2\2\2\u0281\u0282\3\2\2\2\u0282\u0283\7\4\2\2\u0283G\3\2\2\2"+ - "\u0284\u029e\7\60\2\2\u0285\u029e\5Z.\2\u0286\u029e\5L\'\2\u0287\u0289"+ - "\7`\2\2\u0288\u0287\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u0288\3\2\2\2\u028a"+ - "\u028b\3\2\2\2\u028b\u029e\3\2\2\2\u028c\u029e\7_\2\2\u028d\u028e\7M\2"+ - "\2\u028e\u028f\5\\/\2\u028f\u0290\7Q\2\2\u0290\u029e\3\2\2\2\u0291\u0292"+ - "\7N\2\2\u0292\u0293\5\\/\2\u0293\u0294\7Q\2\2\u0294\u029e\3\2\2\2\u0295"+ - 
"\u0296\7O\2\2\u0296\u0297\5\\/\2\u0297\u0298\7Q\2\2\u0298\u029e\3\2\2"+ - "\2\u0299\u029a\7P\2\2\u029a\u029b\5\\/\2\u029b\u029c\7Q\2\2\u029c\u029e"+ - "\3\2\2\2\u029d\u0284\3\2\2\2\u029d\u0285\3\2\2\2\u029d\u0286\3\2\2\2\u029d"+ - "\u0288\3\2\2\2\u029d\u028c\3\2\2\2\u029d\u028d\3\2\2\2\u029d\u0291\3\2"+ - "\2\2\u029d\u0295\3\2\2\2\u029d\u0299\3\2\2\2\u029eI\3\2\2\2\u029f\u02a0"+ - "\t\f\2\2\u02a0K\3\2\2\2\u02a1\u02a2\t\r\2\2\u02a2M\3\2\2\2\u02a3\u02a4"+ - "\5R*\2\u02a4O\3\2\2\2\u02a5\u02a6\5R*\2\u02a6\u02a7\7^\2\2\u02a7\u02a9"+ - "\3\2\2\2\u02a8\u02a5\3\2\2\2\u02a9\u02ac\3\2\2\2\u02aa\u02a8\3\2\2\2\u02aa"+ - "\u02ab\3\2\2\2\u02ab\u02ad\3\2\2\2\u02ac\u02aa\3\2\2\2\u02ad\u02ae\5R"+ - "*\2\u02aeQ\3\2\2\2\u02af\u02b2\5V,\2\u02b0\u02b2\5X-\2\u02b1\u02af\3\2"+ - "\2\2\u02b1\u02b0\3\2\2\2\u02b2S\3\2\2\2\u02b3\u02b4\5R*\2\u02b4\u02b5"+ - "\7\6\2\2\u02b5\u02b7\3\2\2\2\u02b6\u02b3\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7"+ - "\u02b8\3\2\2\2\u02b8\u02c0\7e\2\2\u02b9\u02ba\5R*\2\u02ba\u02bb\7\6\2"+ - "\2\u02bb\u02bd\3\2\2\2\u02bc\u02b9\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02be"+ - "\3\2\2\2\u02be\u02c0\5R*\2\u02bf\u02b6\3\2\2\2\u02bf\u02bc\3\2\2\2\u02c0"+ - "U\3\2\2\2\u02c1\u02c4\7f\2\2\u02c2\u02c4\7g\2\2\u02c3\u02c1\3\2\2\2\u02c3"+ - "\u02c2\3\2\2\2\u02c4W\3\2\2\2\u02c5\u02c9\7c\2\2\u02c6\u02c9\5^\60\2\u02c7"+ - "\u02c9\7d\2\2\u02c8\u02c5\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c8\u02c7\3\2"+ - "\2\2\u02c9Y\3\2\2\2\u02ca\u02cd\7b\2\2\u02cb\u02cd\7a\2\2\u02cc\u02ca"+ - "\3\2\2\2\u02cc\u02cb\3\2\2\2\u02cd[\3\2\2\2\u02ce\u02cf\t\16\2\2\u02cf"+ - "]\3\2\2\2\u02d0\u02d1\t\17\2\2\u02d1_\3\2\2\2eoqu~\u0080\u0084\u008a\u008d"+ - "\u0098\u009b\u00a5\u00a8\u00ab\u00ae\u00b6\u00b9\u00bf\u00c3\u00c6\u00c9"+ - "\u00cc\u00d3\u00db\u00de\u00ea\u00ed\u00f0\u00f7\u00fe\u0102\u0106\u010d"+ - "\u0111\u0115\u011a\u011e\u0126\u012a\u0131\u013c\u013f\u0143\u014f\u0152"+ - "\u0158\u015f\u0166\u0169\u016d\u0171\u0175\u0177\u0182\u0187\u018b\u018e"+ - 
"\u0194\u0197\u019d\u01a0\u01a2\u01b5\u01c3\u01d1\u01d7\u01df\u01e1\u01e6"+ - "\u01e9\u01f1\u01fa\u0200\u0208\u020d\u0213\u0216\u021a\u0222\u0228\u0234"+ - "\u0236\u0240\u024e\u0255\u0263\u0271\u0276\u027d\u0280\u028a\u029d\u02aa"+ - "\u02b1\u02b6\u02bc\u02bf\u02c3\u02c8\u02cc"; + ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\3\2\3\2\3\2\3\3\3\3\3\3\3\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4r\n\4\f\4\16\4u\13\4\3\4\5\4x\n\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0081\n\4\f\4\16\4\u0084\13\4\3\4\5\4"+ + "\u0087\n\4\3\4\3\4\3\4\3\4\5\4\u008d\n\4\3\4\5\4\u0090\n\4\3\4\3\4\3\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u009b\n\4\3\4\5\4\u009e\n\4\3\4\3\4\3\4\3"+ + "\4\3\4\3\4\3\4\3\4\5\4\u00a8\n\4\3\4\5\4\u00ab\n\4\3\4\5\4\u00ae\n\4\3"+ + "\4\5\4\u00b1\n\4\3\4\3\4\3\4\3\4\7\4\u00b7\n\4\f\4\16\4\u00ba\13\4\5\4"+ + "\u00bc\n\4\3\4\3\4\3\4\3\4\5\4\u00c2\n\4\3\4\3\4\5\4\u00c6\n\4\3\4\5\4"+ + "\u00c9\n\4\3\4\5\4\u00cc\n\4\3\4\5\4\u00cf\n\4\3\4\3\4\3\4\3\4\3\4\5\4"+ + "\u00d6\n\4\3\5\3\5\3\5\3\5\7\5\u00dc\n\5\f\5\16\5\u00df\13\5\5\5\u00e1"+ + "\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00eb\n\6\f\6\16\6\u00ee\13\6"+ + "\5\6\u00f0\n\6\3\6\5\6\u00f3\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00fa\n\7\3\b"+ + "\3\b\3\b\3\b\3\b\5\b\u0101\n\b\3\t\3\t\5\t\u0105\n\t\3\n\3\n\5\n\u0109"+ + "\n\n\3\n\3\n\3\n\7\n\u010e\n\n\f\n\16\n\u0111\13\n\3\n\5\n\u0114\n\n\3"+ + "\n\3\n\5\n\u0118\n\n\3\n\3\n\3\n\5\n\u011d\n\n\3\n\3\n\5\n\u0121\n\n\3"+ + "\13\3\13\3\13\3\13\7\13\u0127\n\13\f\13\16\13\u012a\13\13\3\f\5\f\u012d"+ + "\n\f\3\f\3\f\3\f\7\f\u0132\n\f\f\f\16\f\u0135\13\f\3\r\3\r\3\16\3\16\3"+ + "\16\3\16\7\16\u013d\n\16\f\16\16\16\u0140\13\16\5\16\u0142\n\16\3\16\3"+ + "\16\5\16\u0146\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21"+ + "\5\21\u0152\n\21\3\21\5\21\u0155\n\21\3\22\3\22\7\22\u0159\n\22\f\22\16"+ + "\22\u015c\13\22\3\23\3\23\3\23\3\23\5\23\u0162\n\23\3\23\3\23\3\23\3\23"+ + "\3\23\5\23\u0169\n\23\3\24\5\24\u016c\n\24\3\24\3\24\5\24\u0170\n\24\3"+ + 
"\24\3\24\5\24\u0174\n\24\3\24\3\24\5\24\u0178\n\24\5\24\u017a\n\24\3\25"+ + "\3\25\3\25\3\25\3\25\3\25\3\25\7\25\u0183\n\25\f\25\16\25\u0186\13\25"+ + "\3\25\3\25\5\25\u018a\n\25\3\26\3\26\5\26\u018e\n\26\3\26\5\26\u0191\n"+ + "\26\3\26\3\26\3\26\3\26\5\26\u0197\n\26\3\26\5\26\u019a\n\26\3\26\3\26"+ + "\3\26\3\26\5\26\u01a0\n\26\3\26\5\26\u01a3\n\26\5\26\u01a5\n\26\3\27\3"+ + "\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7"+ + "\30\u01b6\n\30\f\30\16\30\u01b9\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\7\30\u01c4\n\30\f\30\16\30\u01c7\13\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\3\30\3\30\7\30\u01d2\n\30\f\30\16\30\u01d5\13\30\3"+ + "\30\3\30\3\30\5\30\u01da\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01e2"+ + "\n\30\f\30\16\30\u01e5\13\30\3\31\3\31\5\31\u01e9\n\31\3\32\5\32\u01ec"+ + "\n\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u01f4\n\32\3\32\3\32\3\32\3\32"+ + "\3\32\7\32\u01fb\n\32\f\32\16\32\u01fe\13\32\3\32\3\32\3\32\5\32\u0203"+ + "\n\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u020b\n\32\3\32\3\32\3\32\5\32"+ + "\u0210\n\32\3\32\3\32\3\32\3\32\5\32\u0216\n\32\3\32\5\32\u0219\n\32\3"+ + "\33\3\33\5\33\u021d\n\33\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0225\n\34"+ + "\3\35\3\35\3\35\3\35\5\35\u022b\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35"+ + "\3\35\3\35\3\35\7\35\u0237\n\35\f\35\16\35\u023a\13\35\3\36\3\36\3\36"+ + "\3\36\3\36\3\36\3\36\5\36\u0243\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ + "\3\36\3\36\3\36\3\36\3\36\5\36\u0251\n\36\3\37\3\37\3\37\3\37\3\37\5\37"+ + "\u0258\n\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\5!\u0266\n!\3\"\3\"\3"+ + "\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\5#\u0274\n#\3$\3$\3$\5$\u0279\n$\3$"+ + "\3$\3$\7$\u027e\n$\f$\16$\u0281\13$\5$\u0283\n$\3$\3$\3%\3%\3%\5%\u028a"+ + "\n%\3&\3&\3&\3&\6&\u0290\n&\r&\16&\u0291\3&\3&\3&\3&\3&\3&\3&\3&\3&\3"+ + "&\3&\3&\3&\3&\3&\3&\3&\5&\u02a5\n&\3\'\3\'\3(\3(\3)\3)\3*\3*\3*\7*\u02b0"+ + "\n*\f*\16*\u02b3\13*\3*\3*\3+\3+\5+\u02b9\n+\3,\3,\3,\5,\u02be\n,\3,\3"+ + 
",\3,\3,\5,\u02c4\n,\3,\5,\u02c7\n,\3-\3-\5-\u02cb\n-\3.\3.\3.\5.\u02d0"+ + "\n.\3/\3/\5/\u02d4\n/\3\60\3\60\3\61\3\61\3\61\2\4.8\62\2\4\6\b\n\f\16"+ + "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`\2"+ + "\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37"+ + "%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7\27\27\3\2XY\3\2Z\\\3\2RW"+ + "\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>"+ + "?ABDEGG\u0336\2b\3\2\2\2\4e\3\2\2\2\6\u00d5\3\2\2\2\b\u00e0\3\2\2\2\n"+ + "\u00e4\3\2\2\2\f\u00f9\3\2\2\2\16\u0100\3\2\2\2\20\u0102\3\2\2\2\22\u0106"+ + "\3\2\2\2\24\u0122\3\2\2\2\26\u012c\3\2\2\2\30\u0136\3\2\2\2\32\u0145\3"+ + "\2\2\2\34\u0147\3\2\2\2\36\u014d\3\2\2\2 \u014f\3\2\2\2\"\u0156\3\2\2"+ + "\2$\u0168\3\2\2\2&\u0179\3\2\2\2(\u0189\3\2\2\2*\u01a4\3\2\2\2,\u01a6"+ + "\3\2\2\2.\u01d9\3\2\2\2\60\u01e6\3\2\2\2\62\u0218\3\2\2\2\64\u021a\3\2"+ + "\2\2\66\u0224\3\2\2\28\u022a\3\2\2\2:\u0250\3\2\2\2<\u0257\3\2\2\2>\u0259"+ + "\3\2\2\2@\u0265\3\2\2\2B\u0267\3\2\2\2D\u0273\3\2\2\2F\u0275\3\2\2\2H"+ + "\u0289\3\2\2\2J\u02a4\3\2\2\2L\u02a6\3\2\2\2N\u02a8\3\2\2\2P\u02aa\3\2"+ + "\2\2R\u02b1\3\2\2\2T\u02b8\3\2\2\2V\u02c6\3\2\2\2X\u02ca\3\2\2\2Z\u02cf"+ + "\3\2\2\2\\\u02d3\3\2\2\2^\u02d5\3\2\2\2`\u02d7\3\2\2\2bc\5\6\4\2cd\7\2"+ + "\2\3d\3\3\2\2\2ef\5,\27\2fg\7\2\2\3g\5\3\2\2\2h\u00d6\5\b\5\2iw\7\33\2"+ + "\2js\7\3\2\2kl\78\2\2lr\t\2\2\2mn\7\36\2\2nr\t\3\2\2op\7G\2\2pr\5N(\2"+ + "qk\3\2\2\2qm\3\2\2\2qo\3\2\2\2ru\3\2\2\2sq\3\2\2\2st\3\2\2\2tv\3\2\2\2"+ + "us\3\2\2\2vx\7\4\2\2wj\3\2\2\2wx\3\2\2\2xy\3\2\2\2y\u00d6\5\6\4\2z\u0086"+ + "\7\24\2\2{\u0082\7\3\2\2|}\78\2\2}\u0081\t\4\2\2~\177\7\36\2\2\177\u0081"+ + "\t\3\2\2\u0080|\3\2\2\2\u0080~\3\2\2\2\u0081\u0084\3\2\2\2\u0082\u0080"+ + "\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0085\3\2\2\2\u0084\u0082\3\2\2\2\u0085"+ + "\u0087\7\4\2\2\u0086{\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3\2\2\2"+ + "\u0088\u00d6\5\6\4\2\u0089\u008a\7>\2\2\u008a\u008f\7A\2\2\u008b\u008d"+ + 
"\7*\2\2\u008c\u008b\3\2\2\2\u008c\u008d\3\2\2\2\u008d\u008e\3\2\2\2\u008e"+ + "\u0090\5\64\33\2\u008f\u008c\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u00d6\3"+ + "\2\2\2\u0091\u0092\7>\2\2\u0092\u0093\7\23\2\2\u0093\u0094\t\5\2\2\u0094"+ + "\u00d6\5V,\2\u0095\u0096\t\6\2\2\u0096\u00d6\5V,\2\u0097\u0098\7>\2\2"+ + "\u0098\u009d\7!\2\2\u0099\u009b\7*\2\2\u009a\u0099\3\2\2\2\u009a\u009b"+ + "\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009e\5\64\33\2\u009d\u009a\3\2\2\2"+ + "\u009d\u009e\3\2\2\2\u009e\u00d6\3\2\2\2\u009f\u00a0\7>\2\2\u00a0\u00d6"+ + "\7<\2\2\u00a1\u00a2\7?\2\2\u00a2\u00d6\7\22\2\2\u00a3\u00a4\7?\2\2\u00a4"+ + "\u00aa\7A\2\2\u00a5\u00a7\7\21\2\2\u00a6\u00a8\7*\2\2\u00a7\u00a6\3\2"+ + "\2\2\u00a7\u00a8\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00ab\5\64\33\2\u00aa"+ + "\u00a5\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00b0\3\2\2\2\u00ac\u00ae\7*"+ + "\2\2\u00ad\u00ac\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\u00af\3\2\2\2\u00af"+ + "\u00b1\5\64\33\2\u00b0\u00ad\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00bb\3"+ + "\2\2\2\u00b2\u00b3\7D\2\2\u00b3\u00b8\5^\60\2\u00b4\u00b5\7\5\2\2\u00b5"+ + "\u00b7\5^\60\2\u00b6\u00b4\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2"+ + "\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00bb"+ + "\u00b2\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\u00d6\3\2\2\2\u00bd\u00be\7?"+ + "\2\2\u00be\u00c1\7\23\2\2\u00bf\u00c0\7\21\2\2\u00c0\u00c2\5^\60\2\u00c1"+ + "\u00bf\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c8\3\2\2\2\u00c3\u00c5\7@"+ + "\2\2\u00c4\u00c6\7*\2\2\u00c5\u00c4\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6"+ + "\u00c7\3\2\2\2\u00c7\u00c9\5\64\33\2\u00c8\u00c3\3\2\2\2\u00c8\u00c9\3"+ + "\2\2\2\u00c9\u00ce\3\2\2\2\u00ca\u00cc\7*\2\2\u00cb\u00ca\3\2\2\2\u00cb"+ + "\u00cc\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00cf\5\64\33\2\u00ce\u00cb\3"+ + "\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d6\3\2\2\2\u00d0\u00d1\7?\2\2\u00d1"+ + "\u00d6\7E\2\2\u00d2\u00d3\7?\2\2\u00d3\u00d4\7@\2\2\u00d4\u00d6\7E\2\2"+ + "\u00d5h\3\2\2\2\u00d5i\3\2\2\2\u00d5z\3\2\2\2\u00d5\u0089\3\2\2\2\u00d5"+ + 
"\u0091\3\2\2\2\u00d5\u0095\3\2\2\2\u00d5\u0097\3\2\2\2\u00d5\u009f\3\2"+ + "\2\2\u00d5\u00a1\3\2\2\2\u00d5\u00a3\3\2\2\2\u00d5\u00bd\3\2\2\2\u00d5"+ + "\u00d0\3\2\2\2\u00d5\u00d2\3\2\2\2\u00d6\7\3\2\2\2\u00d7\u00d8\7I\2\2"+ + "\u00d8\u00dd\5\34\17\2\u00d9\u00da\7\5\2\2\u00da\u00dc\5\34\17\2\u00db"+ + "\u00d9\3\2\2\2\u00dc\u00df\3\2\2\2\u00dd\u00db\3\2\2\2\u00dd\u00de\3\2"+ + "\2\2\u00de\u00e1\3\2\2\2\u00df\u00dd\3\2\2\2\u00e0\u00d7\3\2\2\2\u00e0"+ + "\u00e1\3\2\2\2\u00e1\u00e2\3\2\2\2\u00e2\u00e3\5\n\6\2\u00e3\t\3\2\2\2"+ + "\u00e4\u00ef\5\16\b\2\u00e5\u00e6\7\64\2\2\u00e6\u00e7\7\17\2\2\u00e7"+ + "\u00ec\5\20\t\2\u00e8\u00e9\7\5\2\2\u00e9\u00eb\5\20\t\2\u00ea\u00e8\3"+ + "\2\2\2\u00eb\u00ee\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed"+ + "\u00f0\3\2\2\2\u00ee\u00ec\3\2\2\2\u00ef\u00e5\3\2\2\2\u00ef\u00f0\3\2"+ + "\2\2\u00f0\u00f2\3\2\2\2\u00f1\u00f3\5\f\7\2\u00f2\u00f1\3\2\2\2\u00f2"+ + "\u00f3\3\2\2\2\u00f3\13\3\2\2\2\u00f4\u00f5\7+\2\2\u00f5\u00fa\t\7\2\2"+ + "\u00f6\u00f7\7L\2\2\u00f7\u00f8\t\7\2\2\u00f8\u00fa\7Q\2\2\u00f9\u00f4"+ + "\3\2\2\2\u00f9\u00f6\3\2\2\2\u00fa\r\3\2\2\2\u00fb\u0101\5\22\n\2\u00fc"+ + "\u00fd\7\3\2\2\u00fd\u00fe\5\n\6\2\u00fe\u00ff\7\4\2\2\u00ff\u0101\3\2"+ + "\2\2\u0100\u00fb\3\2\2\2\u0100\u00fc\3\2\2\2\u0101\17\3\2\2\2\u0102\u0104"+ + "\5,\27\2\u0103\u0105\t\b\2\2\u0104\u0103\3\2\2\2\u0104\u0105\3\2\2\2\u0105"+ + "\21\3\2\2\2\u0106\u0108\7=\2\2\u0107\u0109\5\36\20\2\u0108\u0107\3\2\2"+ + "\2\u0108\u0109\3\2\2\2\u0109\u010a\3\2\2\2\u010a\u010f\5 \21\2\u010b\u010c"+ + "\7\5\2\2\u010c\u010e\5 \21\2\u010d\u010b\3\2\2\2\u010e\u0111\3\2\2\2\u010f"+ + "\u010d\3\2\2\2\u010f\u0110\3\2\2\2\u0110\u0113\3\2\2\2\u0111\u010f\3\2"+ + "\2\2\u0112\u0114\5\24\13\2\u0113\u0112\3\2\2\2\u0113\u0114\3\2\2\2\u0114"+ + "\u0117\3\2\2\2\u0115\u0116\7H\2\2\u0116\u0118\5.\30\2\u0117\u0115\3\2"+ + "\2\2\u0117\u0118\3\2\2\2\u0118\u011c\3\2\2\2\u0119\u011a\7#\2\2\u011a"+ + "\u011b\7\17\2\2\u011b\u011d\5\26\f\2\u011c\u0119\3\2\2\2\u011c\u011d\3"+ + 
"\2\2\2\u011d\u0120\3\2\2\2\u011e\u011f\7$\2\2\u011f\u0121\5.\30\2\u0120"+ + "\u011e\3\2\2\2\u0120\u0121\3\2\2\2\u0121\23\3\2\2\2\u0122\u0123\7\37\2"+ + "\2\u0123\u0128\5\"\22\2\u0124\u0125\7\5\2\2\u0125\u0127\5\"\22\2\u0126"+ + "\u0124\3\2\2\2\u0127\u012a\3\2\2\2\u0128\u0126\3\2\2\2\u0128\u0129\3\2"+ + "\2\2\u0129\25\3\2\2\2\u012a\u0128\3\2\2\2\u012b\u012d\5\36\20\2\u012c"+ + "\u012b\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u012e\3\2\2\2\u012e\u0133\5\30"+ + "\r\2\u012f\u0130\7\5\2\2\u0130\u0132\5\30\r\2\u0131\u012f\3\2\2\2\u0132"+ + "\u0135\3\2\2\2\u0133\u0131\3\2\2\2\u0133\u0134\3\2\2\2\u0134\27\3\2\2"+ + "\2\u0135\u0133\3\2\2\2\u0136\u0137\5\32\16\2\u0137\31\3\2\2\2\u0138\u0141"+ + "\7\3\2\2\u0139\u013e\5,\27\2\u013a\u013b\7\5\2\2\u013b\u013d\5,\27\2\u013c"+ + "\u013a\3\2\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013f\3\2"+ + "\2\2\u013f\u0142\3\2\2\2\u0140\u013e\3\2\2\2\u0141\u0139\3\2\2\2\u0141"+ + "\u0142\3\2\2\2\u0142\u0143\3\2\2\2\u0143\u0146\7\4\2\2\u0144\u0146\5,"+ + "\27\2\u0145\u0138\3\2\2\2\u0145\u0144\3\2\2\2\u0146\33\3\2\2\2\u0147\u0148"+ + "\5T+\2\u0148\u0149\7\f\2\2\u0149\u014a\7\3\2\2\u014a\u014b\5\n\6\2\u014b"+ + "\u014c\7\4\2\2\u014c\35\3\2\2\2\u014d\u014e\t\t\2\2\u014e\37\3\2\2\2\u014f"+ + "\u0154\5,\27\2\u0150\u0152\7\f\2\2\u0151\u0150\3\2\2\2\u0151\u0152\3\2"+ + "\2\2\u0152\u0153\3\2\2\2\u0153\u0155\5T+\2\u0154\u0151\3\2\2\2\u0154\u0155"+ + "\3\2\2\2\u0155!\3\2\2\2\u0156\u015a\5*\26\2\u0157\u0159\5$\23\2\u0158"+ + "\u0157\3\2\2\2\u0159\u015c\3\2\2\2\u015a\u0158\3\2\2\2\u015a\u015b\3\2"+ + "\2\2\u015b#\3\2\2\2\u015c\u015a\3\2\2\2\u015d\u015e\5&\24\2\u015e\u015f"+ + "\7(\2\2\u015f\u0161\5*\26\2\u0160\u0162\5(\25\2\u0161\u0160\3\2\2\2\u0161"+ + "\u0162\3\2\2\2\u0162\u0169\3\2\2\2\u0163\u0164\7.\2\2\u0164\u0165\5&\24"+ + "\2\u0165\u0166\7(\2\2\u0166\u0167\5*\26\2\u0167\u0169\3\2\2\2\u0168\u015d"+ + "\3\2\2\2\u0168\u0163\3\2\2\2\u0169%\3\2\2\2\u016a\u016c\7&\2\2\u016b\u016a"+ + 
"\3\2\2\2\u016b\u016c\3\2\2\2\u016c\u017a\3\2\2\2\u016d\u016f\7)\2\2\u016e"+ + "\u0170\7\65\2\2\u016f\u016e\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u017a\3"+ + "\2\2\2\u0171\u0173\79\2\2\u0172\u0174\7\65\2\2\u0173\u0172\3\2\2\2\u0173"+ + "\u0174\3\2\2\2\u0174\u017a\3\2\2\2\u0175\u0177\7 \2\2\u0176\u0178\7\65"+ + "\2\2\u0177\u0176\3\2\2\2\u0177\u0178\3\2\2\2\u0178\u017a\3\2\2\2\u0179"+ + "\u016b\3\2\2\2\u0179\u016d\3\2\2\2\u0179\u0171\3\2\2\2\u0179\u0175\3\2"+ + "\2\2\u017a\'\3\2\2\2\u017b\u017c\7\61\2\2\u017c\u018a\5.\30\2\u017d\u017e"+ + "\7F\2\2\u017e\u017f\7\3\2\2\u017f\u0184\5T+\2\u0180\u0181\7\5\2\2\u0181"+ + "\u0183\5T+\2\u0182\u0180\3\2\2\2\u0183\u0186\3\2\2\2\u0184\u0182\3\2\2"+ + "\2\u0184\u0185\3\2\2\2\u0185\u0187\3\2\2\2\u0186\u0184\3\2\2\2\u0187\u0188"+ + "\7\4\2\2\u0188\u018a\3\2\2\2\u0189\u017b\3\2\2\2\u0189\u017d\3\2\2\2\u018a"+ + ")\3\2\2\2\u018b\u0190\5V,\2\u018c\u018e\7\f\2\2\u018d\u018c\3\2\2\2\u018d"+ + "\u018e\3\2\2\2\u018e\u018f\3\2\2\2\u018f\u0191\5R*\2\u0190\u018d\3\2\2"+ + "\2\u0190\u0191\3\2\2\2\u0191\u01a5\3\2\2\2\u0192\u0193\7\3\2\2\u0193\u0194"+ + "\5\n\6\2\u0194\u0199\7\4\2\2\u0195\u0197\7\f\2\2\u0196\u0195\3\2\2\2\u0196"+ + "\u0197\3\2\2\2\u0197\u0198\3\2\2\2\u0198\u019a\5R*\2\u0199\u0196\3\2\2"+ + "\2\u0199\u019a\3\2\2\2\u019a\u01a5\3\2\2\2\u019b\u019c\7\3\2\2\u019c\u019d"+ + "\5\"\22\2\u019d\u01a2\7\4\2\2\u019e\u01a0\7\f\2\2\u019f\u019e\3\2\2\2"+ + "\u019f\u01a0\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a3\5R*\2\u01a2\u019f"+ + "\3\2\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a5\3\2\2\2\u01a4\u018b\3\2\2\2\u01a4"+ + "\u0192\3\2\2\2\u01a4\u019b\3\2\2\2\u01a5+\3\2\2\2\u01a6\u01a7\5.\30\2"+ + "\u01a7-\3\2\2\2\u01a8\u01a9\b\30\1\2\u01a9\u01aa\7/\2\2\u01aa\u01da\5"+ + ".\30\n\u01ab\u01ac\7\32\2\2\u01ac\u01ad\7\3\2\2\u01ad\u01ae\5\b\5\2\u01ae"+ + "\u01af\7\4\2\2\u01af\u01da\3\2\2\2\u01b0\u01b1\7;\2\2\u01b1\u01b2\7\3"+ + "\2\2\u01b2\u01b7\5^\60\2\u01b3\u01b4\7\5\2\2\u01b4\u01b6\5^\60\2\u01b5"+ + 
"\u01b3\3\2\2\2\u01b6\u01b9\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b7\u01b8\3\2"+ + "\2\2\u01b8\u01ba\3\2\2\2\u01b9\u01b7\3\2\2\2\u01ba\u01bb\7\4\2\2\u01bb"+ + "\u01da\3\2\2\2\u01bc\u01bd\7-\2\2\u01bd\u01be\7\3\2\2\u01be\u01bf\5R*"+ + "\2\u01bf\u01c0\7\5\2\2\u01c0\u01c5\5^\60\2\u01c1\u01c2\7\5\2\2\u01c2\u01c4"+ + "\5^\60\2\u01c3\u01c1\3\2\2\2\u01c4\u01c7\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c5"+ + "\u01c6\3\2\2\2\u01c6\u01c8\3\2\2\2\u01c7\u01c5\3\2\2\2\u01c8\u01c9\7\4"+ + "\2\2\u01c9\u01da\3\2\2\2\u01ca\u01cb\7-\2\2\u01cb\u01cc\7\3\2\2\u01cc"+ + "\u01cd\5^\60\2\u01cd\u01ce\7\5\2\2\u01ce\u01d3\5^\60\2\u01cf\u01d0\7\5"+ + "\2\2\u01d0\u01d2\5^\60\2\u01d1\u01cf\3\2\2\2\u01d2\u01d5\3\2\2\2\u01d3"+ + "\u01d1\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4\u01d6\3\2\2\2\u01d5\u01d3\3\2"+ + "\2\2\u01d6\u01d7\7\4\2\2\u01d7\u01da\3\2\2\2\u01d8\u01da\5\60\31\2\u01d9"+ + "\u01a8\3\2\2\2\u01d9\u01ab\3\2\2\2\u01d9\u01b0\3\2\2\2\u01d9\u01bc\3\2"+ + "\2\2\u01d9\u01ca\3\2\2\2\u01d9\u01d8\3\2\2\2\u01da\u01e3\3\2\2\2\u01db"+ + "\u01dc\f\4\2\2\u01dc\u01dd\7\n\2\2\u01dd\u01e2\5.\30\5\u01de\u01df\f\3"+ + "\2\2\u01df\u01e0\7\63\2\2\u01e0\u01e2\5.\30\4\u01e1\u01db\3\2\2\2\u01e1"+ + "\u01de\3\2\2\2\u01e2\u01e5\3\2\2\2\u01e3\u01e1\3\2\2\2\u01e3\u01e4\3\2"+ + "\2\2\u01e4/\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e6\u01e8\58\35\2\u01e7\u01e9"+ + "\5\62\32\2\u01e8\u01e7\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\61\3\2\2\2\u01ea"+ + "\u01ec\7/\2\2\u01eb\u01ea\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01ed\3\2"+ + "\2\2\u01ed\u01ee\7\16\2\2\u01ee\u01ef\58\35\2\u01ef\u01f0\7\n\2\2\u01f0"+ + "\u01f1\58\35\2\u01f1\u0219\3\2\2\2\u01f2\u01f4\7/\2\2\u01f3\u01f2\3\2"+ + "\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\3\2\2\2\u01f5\u01f6\7%\2\2\u01f6"+ + "\u01f7\7\3\2\2\u01f7\u01fc\5,\27\2\u01f8\u01f9\7\5\2\2\u01f9\u01fb\5,"+ + "\27\2\u01fa\u01f8\3\2\2\2\u01fb\u01fe\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fc"+ + "\u01fd\3\2\2\2\u01fd\u01ff\3\2\2\2\u01fe\u01fc\3\2\2\2\u01ff\u0200\7\4"+ + "\2\2\u0200\u0219\3\2\2\2\u0201\u0203\7/\2\2\u0202\u0201\3\2\2\2\u0202"+ + 
"\u0203\3\2\2\2\u0203\u0204\3\2\2\2\u0204\u0205\7%\2\2\u0205\u0206\7\3"+ + "\2\2\u0206\u0207\5\b\5\2\u0207\u0208\7\4\2\2\u0208\u0219\3\2\2\2\u0209"+ + "\u020b\7/\2\2\u020a\u0209\3\2\2\2\u020a\u020b\3\2\2\2\u020b\u020c\3\2"+ + "\2\2\u020c\u020d\7*\2\2\u020d\u0219\5\64\33\2\u020e\u0210\7/\2\2\u020f"+ + "\u020e\3\2\2\2\u020f\u0210\3\2\2\2\u0210\u0211\3\2\2\2\u0211\u0212\7:"+ + "\2\2\u0212\u0219\5^\60\2\u0213\u0215\7\'\2\2\u0214\u0216\7/\2\2\u0215"+ + "\u0214\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u0219\7\60"+ + "\2\2\u0218\u01eb\3\2\2\2\u0218\u01f3\3\2\2\2\u0218\u0202\3\2\2\2\u0218"+ + "\u020a\3\2\2\2\u0218\u020f\3\2\2\2\u0218\u0213\3\2\2\2\u0219\63\3\2\2"+ + "\2\u021a\u021c\5^\60\2\u021b\u021d\5\66\34\2\u021c\u021b\3\2\2\2\u021c"+ + "\u021d\3\2\2\2\u021d\65\3\2\2\2\u021e\u021f\7\30\2\2\u021f\u0225\5^\60"+ + "\2\u0220\u0221\7J\2\2\u0221\u0222\5^\60\2\u0222\u0223\7Q\2\2\u0223\u0225"+ + "\3\2\2\2\u0224\u021e\3\2\2\2\u0224\u0220\3\2\2\2\u0225\67\3\2\2\2\u0226"+ + "\u0227\b\35\1\2\u0227\u022b\5:\36\2\u0228\u0229\t\n\2\2\u0229\u022b\5"+ + "8\35\6\u022a\u0226\3\2\2\2\u022a\u0228\3\2\2\2\u022b\u0238\3\2\2\2\u022c"+ + "\u022d\f\5\2\2\u022d\u022e\t\13\2\2\u022e\u0237\58\35\6\u022f\u0230\f"+ + "\4\2\2\u0230\u0231\t\n\2\2\u0231\u0237\58\35\5\u0232\u0233\f\3\2\2\u0233"+ + "\u0234\5L\'\2\u0234\u0235\58\35\4\u0235\u0237\3\2\2\2\u0236\u022c\3\2"+ + "\2\2\u0236\u022f\3\2\2\2\u0236\u0232\3\2\2\2\u0237\u023a\3\2\2\2\u0238"+ + "\u0236\3\2\2\2\u0238\u0239\3\2\2\2\u02399\3\2\2\2\u023a\u0238\3\2\2\2"+ + "\u023b\u0251\5<\37\2\u023c\u0251\5@!\2\u023d\u0251\5J&\2\u023e\u0251\7"+ + "Z\2\2\u023f\u0240\5R*\2\u0240\u0241\7^\2\2\u0241\u0243\3\2\2\2\u0242\u023f"+ + "\3\2\2\2\u0242\u0243\3\2\2\2\u0243\u0244\3\2\2\2\u0244\u0251\7Z\2\2\u0245"+ + "\u0251\5D#\2\u0246\u0247\7\3\2\2\u0247\u0248\5\b\5\2\u0248\u0249\7\4\2"+ + "\2\u0249\u0251\3\2\2\2\u024a\u0251\5T+\2\u024b\u0251\5R*\2\u024c\u024d"+ + "\7\3\2\2\u024d\u024e\5,\27\2\u024e\u024f\7\4\2\2\u024f\u0251\3\2\2\2\u0250"+ + 
"\u023b\3\2\2\2\u0250\u023c\3\2\2\2\u0250\u023d\3\2\2\2\u0250\u023e\3\2"+ + "\2\2\u0250\u0242\3\2\2\2\u0250\u0245\3\2\2\2\u0250\u0246\3\2\2\2\u0250"+ + "\u024a\3\2\2\2\u0250\u024b\3\2\2\2\u0250\u024c\3\2\2\2\u0251;\3\2\2\2"+ + "\u0252\u0258\5> \2\u0253\u0254\7K\2\2\u0254\u0255\5> \2\u0255\u0256\7"+ + "Q\2\2\u0256\u0258\3\2\2\2\u0257\u0252\3\2\2\2\u0257\u0253\3\2\2\2\u0258"+ + "=\3\2\2\2\u0259\u025a\7\20\2\2\u025a\u025b\7\3\2\2\u025b\u025c\5,\27\2"+ + "\u025c\u025d\7\f\2\2\u025d\u025e\5P)\2\u025e\u025f\7\4\2\2\u025f?\3\2"+ + "\2\2\u0260\u0266\5B\"\2\u0261\u0262\7K\2\2\u0262\u0263\5B\"\2\u0263\u0264"+ + "\7Q\2\2\u0264\u0266\3\2\2\2\u0265\u0260\3\2\2\2\u0265\u0261\3\2\2\2\u0266"+ + "A\3\2\2\2\u0267\u0268\7\34\2\2\u0268\u0269\7\3\2\2\u0269\u026a\5T+\2\u026a"+ + "\u026b\7\37\2\2\u026b\u026c\58\35\2\u026c\u026d\7\4\2\2\u026dC\3\2\2\2"+ + "\u026e\u0274\5F$\2\u026f\u0270\7K\2\2\u0270\u0271\5F$\2\u0271\u0272\7"+ + "Q\2\2\u0272\u0274\3\2\2\2\u0273\u026e\3\2\2\2\u0273\u026f\3\2\2\2\u0274"+ + "E\3\2\2\2\u0275\u0276\5H%\2\u0276\u0282\7\3\2\2\u0277\u0279\5\36\20\2"+ + "\u0278\u0277\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u027a\3\2\2\2\u027a\u027f"+ + "\5,\27\2\u027b\u027c\7\5\2\2\u027c\u027e\5,\27\2\u027d\u027b\3\2\2\2\u027e"+ + "\u0281\3\2\2\2\u027f\u027d\3\2\2\2\u027f\u0280\3\2\2\2\u0280\u0283\3\2"+ + "\2\2\u0281\u027f\3\2\2\2\u0282\u0278\3\2\2\2\u0282\u0283\3\2\2\2\u0283"+ + "\u0284\3\2\2\2\u0284\u0285\7\4\2\2\u0285G\3\2\2\2\u0286\u028a\7)\2\2\u0287"+ + "\u028a\79\2\2\u0288\u028a\5T+\2\u0289\u0286\3\2\2\2\u0289\u0287\3\2\2"+ + "\2\u0289\u0288\3\2\2\2\u028aI\3\2\2\2\u028b\u02a5\7\60\2\2\u028c\u02a5"+ + "\5\\/\2\u028d\u02a5\5N(\2\u028e\u0290\7`\2\2\u028f\u028e\3\2\2\2\u0290"+ + "\u0291\3\2\2\2\u0291\u028f\3\2\2\2\u0291\u0292\3\2\2\2\u0292\u02a5\3\2"+ + "\2\2\u0293\u02a5\7_\2\2\u0294\u0295\7M\2\2\u0295\u0296\5^\60\2\u0296\u0297"+ + "\7Q\2\2\u0297\u02a5\3\2\2\2\u0298\u0299\7N\2\2\u0299\u029a\5^\60\2\u029a"+ + "\u029b\7Q\2\2\u029b\u02a5\3\2\2\2\u029c\u029d\7O\2\2\u029d\u029e\5^\60"+ + 
"\2\u029e\u029f\7Q\2\2\u029f\u02a5\3\2\2\2\u02a0\u02a1\7P\2\2\u02a1\u02a2"+ + "\5^\60\2\u02a2\u02a3\7Q\2\2\u02a3\u02a5\3\2\2\2\u02a4\u028b\3\2\2\2\u02a4"+ + "\u028c\3\2\2\2\u02a4\u028d\3\2\2\2\u02a4\u028f\3\2\2\2\u02a4\u0293\3\2"+ + "\2\2\u02a4\u0294\3\2\2\2\u02a4\u0298\3\2\2\2\u02a4\u029c\3\2\2\2\u02a4"+ + "\u02a0\3\2\2\2\u02a5K\3\2\2\2\u02a6\u02a7\t\f\2\2\u02a7M\3\2\2\2\u02a8"+ + "\u02a9\t\r\2\2\u02a9O\3\2\2\2\u02aa\u02ab\5T+\2\u02abQ\3\2\2\2\u02ac\u02ad"+ + "\5T+\2\u02ad\u02ae\7^\2\2\u02ae\u02b0\3\2\2\2\u02af\u02ac\3\2\2\2\u02b0"+ + "\u02b3\3\2\2\2\u02b1\u02af\3\2\2\2\u02b1\u02b2\3\2\2\2\u02b2\u02b4\3\2"+ + "\2\2\u02b3\u02b1\3\2\2\2\u02b4\u02b5\5T+\2\u02b5S\3\2\2\2\u02b6\u02b9"+ + "\5X-\2\u02b7\u02b9\5Z.\2\u02b8\u02b6\3\2\2\2\u02b8\u02b7\3\2\2\2\u02b9"+ + "U\3\2\2\2\u02ba\u02bb\5T+\2\u02bb\u02bc\7\6\2\2\u02bc\u02be\3\2\2\2\u02bd"+ + "\u02ba\3\2\2\2\u02bd\u02be\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf\u02c7\7e"+ + "\2\2\u02c0\u02c1\5T+\2\u02c1\u02c2\7\6\2\2\u02c2\u02c4\3\2\2\2\u02c3\u02c0"+ + "\3\2\2\2\u02c3\u02c4\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5\u02c7\5T+\2\u02c6"+ + "\u02bd\3\2\2\2\u02c6\u02c3\3\2\2\2\u02c7W\3\2\2\2\u02c8\u02cb\7f\2\2\u02c9"+ + "\u02cb\7g\2\2\u02ca\u02c8\3\2\2\2\u02ca\u02c9\3\2\2\2\u02cbY\3\2\2\2\u02cc"+ + "\u02d0\7c\2\2\u02cd\u02d0\5`\61\2\u02ce\u02d0\7d\2\2\u02cf\u02cc\3\2\2"+ + "\2\u02cf\u02cd\3\2\2\2\u02cf\u02ce\3\2\2\2\u02d0[\3\2\2\2\u02d1\u02d4"+ + "\7b\2\2\u02d2\u02d4\7a\2\2\u02d3\u02d1\3\2\2\2\u02d3\u02d2\3\2\2\2\u02d4"+ + "]\3\2\2\2\u02d5\u02d6\t\16\2\2\u02d6_\3\2\2\2\u02d7\u02d8\t\17\2\2\u02d8"+ + "a\3\2\2\2fqsw\u0080\u0082\u0086\u008c\u008f\u009a\u009d\u00a7\u00aa\u00ad"+ + "\u00b0\u00b8\u00bb\u00c1\u00c5\u00c8\u00cb\u00ce\u00d5\u00dd\u00e0\u00ec"+ + "\u00ef\u00f2\u00f9\u0100\u0104\u0108\u010f\u0113\u0117\u011c\u0120\u0128"+ + "\u012c\u0133\u013e\u0141\u0145\u0151\u0154\u015a\u0161\u0168\u016b\u016f"+ + "\u0173\u0177\u0179\u0184\u0189\u018d\u0190\u0196\u0199\u019f\u01a2\u01a4"+ + 
"\u01b7\u01c5\u01d3\u01d9\u01e1\u01e3\u01e8\u01eb\u01f3\u01fc\u0202\u020a"+ + "\u020f\u0215\u0218\u021c\u0224\u022a\u0236\u0238\u0242\u0250\u0257\u0265"+ + "\u0273\u0278\u027f\u0282\u0289\u0291\u02a4\u02b1\u02b8\u02bd\u02c3\u02c6"+ + "\u02ca\u02cf\u02d3"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 6745b3fa89b..b2ad5c8f770 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -445,6 +445,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#functionName}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunctionName(SqlBaseParser.FunctionNameContext ctx); /** * Visit a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. 
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index 3702939dd37..ecb5b83896e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.parser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; import org.elasticsearch.xpack.sql.type.DataType; @@ -15,6 +16,13 @@ public class ExpressionTests extends ESTestCase { private final SqlParser parser = new SqlParser(); + public void testTokenFunctionName() throws Exception { + Expression lt = parser.createExpression("LEFT()"); + assertEquals(UnresolvedFunction.class, lt.getClass()); + UnresolvedFunction uf = (UnresolvedFunction) lt; + assertEquals("LEFT", uf.functionName()); + } + public void testLiteralLong() throws Exception { Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java index f7f03e5e4b7..de9c6c56da0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java @@ -23,6 +23,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; public class SqlParserTests extends ESTestCase { + public void testSelectStar() { 
singleProjection(project(parseStatement("SELECT * FROM foo")), UnresolvedStar.class); } @@ -44,6 +45,11 @@ public class SqlParserTests extends ESTestCase { assertEquals("SCORE", f.functionName()); } + public void testSelectRightFunction() { + UnresolvedFunction f = singleProjection(project(parseStatement("SELECT RIGHT()")), UnresolvedFunction.class); + assertEquals("RIGHT", f.functionName()); + } + public void testOrderByField() { Order.OrderDirection dir = randomFrom(Order.OrderDirection.values()); OrderBy ob = orderBy(parseStatement("SELECT * FROM foo ORDER BY bar" + stringForDirection(dir))); From 14d7e2c7b2d3761a361edd720f98353f856936a4 Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Mon, 16 Jul 2018 13:57:17 -0700 Subject: [PATCH 046/260] Revert "[test] disable packaging tests for suse boxes" This reverts commit 30d6fd3ca74c44de27f8314fe7c070ac1d23819a. --- .../gradle/vagrant/VagrantTestPlugin.groovy | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index de3c0dfc328..d4d1d857e90 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -526,11 +526,7 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(batsPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - // these tests are temporarily disabled for suse boxes while we debug an issue - // https://github.com/elastic/elasticsearch/issues/30295 - if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { - packagingTest.dependsOn(batsPackagingTest) - } + packagingTest.dependsOn(batsPackagingTest) } } @@ -569,11 +565,7 @@ class VagrantTestPlugin implements Plugin { 
project.gradle.removeListener(javaPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - // these tests are temporarily disabled for suse boxes while we debug an issue - // https://github.com/elastic/elasticsearch/issues/30295 - if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { - packagingTest.dependsOn(javaPackagingTest) - } + packagingTest.dependsOn(javaPackagingTest) } /* From 791b9b147cb1763678500ca91cc28e7fa171f40d Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Mon, 16 Jul 2018 17:20:50 -0400 Subject: [PATCH 047/260] [Rollup] Add new capabilities endpoint for concrete rollup indices (#30401) This introduces a new GetRollupIndexCaps API which allows the user to retrieve rollup capabilities of a specific rollup index (or index pattern). This is distinct from the existing RollupCaps endpoint. - Multiple jobs can be stored in multiple indices and point to a single target data index pattern (logstash-*). The existing API finds capabilities/config of all jobs matching that data index pattern. - One rollup index can hold data from multiple jobs, targeting multiple data index patterns. This new API finds the capabilities based on the concrete rollup indices. 
--- .../rollup/rollup-index-caps.asciidoc | 161 ++++++++ .../rollup/action/GetRollupCapsAction.java | 2 +- .../action/GetRollupIndexCapsAction.java | 195 ++++++++++ .../elasticsearch/xpack/rollup/Rollup.java | 33 +- .../action/TransportGetRollupCapsAction.java | 7 +- .../TransportGetRollupIndexCapsAction.java | 79 ++++ .../rest/RestGetRollupIndexCapsAction.java | 38 ++ .../GetRollupCapsActionRequestTests.java | 2 +- .../GetRollupIndexCapsActionRequestTests.java | 177 +++++++++ .../xpack.rollup.get_rollup_index_caps.json | 17 + .../test/rollup/get_rollup_caps.yml | 13 +- .../test/rollup/get_rollup_index_caps.yml | 363 ++++++++++++++++++ 12 files changed, 1066 insertions(+), 21 deletions(-) create mode 100644 x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java create mode 100644 x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java create mode 100644 x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java create mode 100644 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml diff --git a/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc new file mode 100644 index 00000000000..4636d9775e9 --- /dev/null +++ b/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc @@ -0,0 +1,161 @@ +[role="xpack"] +[[rollup-get-rollup-index-caps]] +=== Get Rollup Index Capabilities +++++ +Get Rollup Index Caps +++++ + +experimental[] + +This API returns the rollup capabilities of all jobs inside of a rollup 
index (e.g. the index where rollup data is stored). +A single rollup index may store the data for multiple rollup jobs, and may have a variety of capabilities depending on those jobs. + +This API will allow you to determine: + +1. What jobs are stored in an index (or indices specified via a pattern)? +2. What target indices were rolled up, what fields were used in those rollups and what aggregations can be performed on each job? + +==== Request + +`GET {index}/_xpack/rollup/data` + +//===== Description + +==== Path Parameters + +`index`:: + (string) Index or index-pattern of concrete rollup indices to check for capabilities. + + + +==== Request Body + +There is no request body for the Get Jobs API. + +==== Authorization + +You must have `monitor`, `monitor_rollup`, `manage` or `manage_rollup` cluster privileges to use this API. +For more information, see +{xpack-ref}/security-privileges.html[Security Privileges]. + +==== Examples + +Imagine we have an index named `sensor-1` full of raw data. We know that the data will grow over time, so there +will be a `sensor-2`, `sensor-3`, etc. 
Let's create a Rollup job, which stores it's data in `sensor_rollup`: + +[source,js] +-------------------------------------------------- +PUT _xpack/rollup/job/sensor +{ + "index_pattern": "sensor-*", + "rollup_index": "sensor_rollup", + "cron": "*/30 * * * * ?", + "page_size" :1000, + "groups" : { + "date_histogram": { + "field": "timestamp", + "interval": "1h", + "delay": "7d" + }, + "terms": { + "fields": ["node"] + } + }, + "metrics": [ + { + "field": "temperature", + "metrics": ["min", "max", "sum"] + }, + { + "field": "voltage", + "metrics": ["avg"] + } + ] +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sensor_index] + +If at a later date, we'd like to determine what jobs and capabilities were stored in the `sensor_rollup` index, we can use the Get Rollup +Index API: + +[source,js] +-------------------------------------------------- +GET /sensor_rollup/_xpack/rollup/data +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +Note how we are requesting the concrete rollup index name (`sensor_rollup`) as the first part of the URL. +This will yield the following response: + +[source,js] +---- +{ + "sensor_rollup" : { + "rollup_jobs" : [ + { + "job_id" : "sensor", + "rollup_index" : "sensor_rollup", + "index_pattern" : "sensor-*", + "fields" : { + "node" : [ + { + "agg" : "terms" + } + ], + "temperature" : [ + { + "agg" : "min" + }, + { + "agg" : "max" + }, + { + "agg" : "sum" + } + ], + "timestamp" : [ + { + "agg" : "date_histogram", + "time_zone" : "UTC", + "interval" : "1h", + "delay": "7d" + } + ], + "voltage" : [ + { + "agg" : "avg" + } + ] + } + } + ] + } +} +---- +// TESTRESPONSE + + +The response that is returned contains information that is similar to the original Rollup configuration, but formatted +differently. First, there are some house-keeping details: the Rollup job's ID, the index that holds the rolled data, +the index pattern that the job was targeting. 
+ +Next it shows a list of fields that contain data eligible for rollup searches. Here we see four fields: `node`, `temperature`, +`timestamp` and `voltage`. Each of these fields list the aggregations that are possible. For example, you can use a min, max +or sum aggregation on the `temperature` field, but only a `date_histogram` on `timestamp`. + +Note that the `rollup_jobs` element is an array; there can be multiple, independent jobs configured for a single index +or index pattern. Each of these jobs may have different configurations, so the API returns a list of all the various +configurations available. + + +Like other APIs that interact with indices, you can specify index patterns instead of explicit indices: + +[source,js] +-------------------------------------------------- +GET /*_rollup/_xpack/rollup/data +-------------------------------------------------- +// CONSOLE +// TEST[continued] + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java index ea98c2f4628..128874a6c8c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java @@ -139,7 +139,7 @@ public class GetRollupCapsAction extends Action { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); for (Map.Entry entry : jobs.entrySet()) { - entry.getValue().toXContent(builder, params); + entry.getValue().toXContent(builder, params); } builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java new file mode 100644 index 
00000000000..4f95919c498 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.rollup.action; + + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.rollup.RollupField; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +public class GetRollupIndexCapsAction extends Action { + + public static final GetRollupIndexCapsAction INSTANCE = new GetRollupIndexCapsAction(); + public static final String NAME = "indices:data/read/xpack/rollup/get/index/caps"; + public static final ParseField CONFIG = new ParseField("config"); + public static final ParseField STATUS = new ParseField("status"); + private static final ParseField INDICES_OPTIONS = new ParseField("indices_options"); + + private 
GetRollupIndexCapsAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest implements IndicesRequest.Replaceable, ToXContent { + private String[] indices; + private IndicesOptions options; + + public Request(String[] indices) { + this(indices, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED); + } + + public Request(String[] indices, IndicesOptions options) { + this.indices = indices; + this.options = options; + } + + public Request() {} + + @Override + public IndicesOptions indicesOptions() { + return options; + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesRequest indices(String... indices) { + Objects.requireNonNull(indices, "indices must not be null"); + for (String index : indices) { + Objects.requireNonNull(index, "index must not be null"); + } + this.indices = indices; + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.indices = in.readStringArray(); + this.options = IndicesOptions.readIndicesOptions(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + options.writeIndicesOptions(out); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.array(RollupField.ID.getPreferredName(), indices); + builder.field(INDICES_OPTIONS.getPreferredName(), options); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(indices), options); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Arrays.equals(indices, 
other.indices) + && Objects.equals(options, other.options); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, GetRollupIndexCapsAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends ActionResponse implements Writeable, ToXContentObject { + + private Map jobs = Collections.emptyMap(); + + public Response() { + + } + + public Response(Map jobs) { + this.jobs = Objects.requireNonNull(jobs); + } + + Response(StreamInput in) throws IOException { + jobs = in.readMap(StreamInput::readString, RollableIndexCaps::new); + } + + public Map getJobs() { + return jobs; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(jobs, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + for (Map.Entry entry : jobs.entrySet()) { + entry.getValue().toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobs); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(jobs, other.jobs); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } +} diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java index cc24a0b4ab9..546103df5dd 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -38,6 +38,7 @@ import 
org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.rollup.action.TransportDeleteRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportGetRollupCapsAction; +import org.elasticsearch.xpack.rollup.action.TransportGetRollupIndexCapsAction; import org.elasticsearch.xpack.rollup.action.TransportGetRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportPutRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportRollupSearchAction; @@ -55,6 +57,7 @@ import org.elasticsearch.xpack.rollup.action.TransportStopRollupAction; import org.elasticsearch.xpack.rollup.job.RollupJobTask; import org.elasticsearch.xpack.rollup.rest.RestDeleteRollupJobAction; import org.elasticsearch.xpack.rollup.rest.RestGetRollupCapsAction; +import org.elasticsearch.xpack.rollup.rest.RestGetRollupIndexCapsAction; import org.elasticsearch.xpack.rollup.rest.RestGetRollupJobsAction; import org.elasticsearch.xpack.rollup.rest.RestPutRollupJobAction; import org.elasticsearch.xpack.rollup.rest.RestRollupSearchAction; @@ -136,13 +139,14 @@ public class Rollup extends Plugin implements ActionPlugin, PersistentTaskPlugin } return Arrays.asList( - new RestRollupSearchAction(settings, restController), - new RestPutRollupJobAction(settings, restController), - new RestStartRollupJobAction(settings, restController), - new 
RestStopRollupJobAction(settings, restController), - new RestDeleteRollupJobAction(settings, restController), - new RestGetRollupJobsAction(settings, restController), - new RestGetRollupCapsAction(settings, restController) + new RestRollupSearchAction(settings, restController), + new RestPutRollupJobAction(settings, restController), + new RestStartRollupJobAction(settings, restController), + new RestStopRollupJobAction(settings, restController), + new RestDeleteRollupJobAction(settings, restController), + new RestGetRollupJobsAction(settings, restController), + new RestGetRollupCapsAction(settings, restController), + new RestGetRollupIndexCapsAction(settings, restController) ); } @@ -153,13 +157,14 @@ public class Rollup extends Plugin implements ActionPlugin, PersistentTaskPlugin return emptyList(); } return Arrays.asList( - new ActionHandler<>(RollupSearchAction.INSTANCE, TransportRollupSearchAction.class), - new ActionHandler<>(PutRollupJobAction.INSTANCE, TransportPutRollupJobAction.class), - new ActionHandler<>(StartRollupJobAction.INSTANCE, TransportStartRollupAction.class), - new ActionHandler<>(StopRollupJobAction.INSTANCE, TransportStopRollupAction.class), - new ActionHandler<>(DeleteRollupJobAction.INSTANCE, TransportDeleteRollupJobAction.class), - new ActionHandler<>(GetRollupJobsAction.INSTANCE, TransportGetRollupJobAction.class), - new ActionHandler<>(GetRollupCapsAction.INSTANCE, TransportGetRollupCapsAction.class) + new ActionHandler<>(RollupSearchAction.INSTANCE, TransportRollupSearchAction.class), + new ActionHandler<>(PutRollupJobAction.INSTANCE, TransportPutRollupJobAction.class), + new ActionHandler<>(StartRollupJobAction.INSTANCE, TransportStartRollupAction.class), + new ActionHandler<>(StopRollupJobAction.INSTANCE, TransportStopRollupAction.class), + new ActionHandler<>(DeleteRollupJobAction.INSTANCE, TransportDeleteRollupJobAction.class), + new ActionHandler<>(GetRollupJobsAction.INSTANCE, TransportGetRollupJobAction.class), + new 
ActionHandler<>(GetRollupCapsAction.INSTANCE, TransportGetRollupCapsAction.class), + new ActionHandler<>(GetRollupIndexCapsAction.INSTANCE, TransportGetRollupIndexCapsAction.class) ); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java index 5f013e8897b..6d565e43b86 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java @@ -44,7 +44,6 @@ public class TransportGetRollupCapsAction extends HandledTransportAction listener) { - Map allCaps = getCaps(request.getIndexPattern(), clusterService.state().getMetaData().indices()); listener.onResponse(new GetRollupCapsAction.Response(allCaps)); } @@ -67,7 +66,7 @@ public class TransportGetRollupCapsAction extends HandledTransportAction { String pattern = indexPattern.equals(MetaData.ALL) - ? jobCap.getIndexPattern() : indexPattern; + ? jobCap.getIndexPattern() : indexPattern; // Do we already have an entry for this index pattern? 
RollableIndexCaps indexCaps = allCaps.get(pattern); @@ -98,11 +97,11 @@ public class TransportGetRollupCapsAction extends HandledTransportAction { + + private final ClusterService clusterService; + + @Inject + public TransportGetRollupIndexCapsAction(Settings settings, TransportService transportService, + ClusterService clusterService, ActionFilters actionFilters) { + super(settings, GetRollupIndexCapsAction.NAME, transportService, actionFilters, + (Supplier) GetRollupIndexCapsAction.Request::new); + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, GetRollupIndexCapsAction.Request request, + ActionListener listener) { + + IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(clusterService.getSettings()); + String[] indices = resolver.concreteIndexNames(clusterService.state(), + request.indicesOptions(), request.indices()); + Map allCaps = getCapsByRollupIndex(Arrays.asList(indices), + clusterService.state().getMetaData().indices()); + listener.onResponse(new GetRollupIndexCapsAction.Response(allCaps)); + } + + static Map getCapsByRollupIndex(List resolvedIndexNames, + ImmutableOpenMap indices) { + Map allCaps = new TreeMap<>(); + + StreamSupport.stream(indices.spliterator(), false) + .filter(entry -> resolvedIndexNames.contains(entry.key)) + .forEach(entry -> { + // Does this index have rollup metadata? + TransportGetRollupCapsAction.findRollupIndexCaps(entry.key, entry.value) + .ifPresent(cap -> { + cap.getJobCaps().forEach(jobCap -> { + // Do we already have an entry for this index? 
+ RollableIndexCaps indexCaps = allCaps.get(jobCap.getRollupIndex()); + if (indexCaps == null) { + indexCaps = new RollableIndexCaps(jobCap.getRollupIndex()); + } + indexCaps.addJobCap(jobCap); + allCaps.put(jobCap.getRollupIndex(), indexCaps); + }); + }); + }); + + return allCaps; + } + +} diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java new file mode 100644 index 00000000000..4f4336f11ab --- /dev/null +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.rollup.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; + +public class RestGetRollupIndexCapsAction extends BaseRestHandler { + public static final ParseField INDEX = new ParseField("index"); + + public RestGetRollupIndexCapsAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, "/{index}/_xpack/rollup/data", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String index = restRequest.param(INDEX.getPreferredName()); + 
IndicesOptions options = IndicesOptions.fromRequest(restRequest, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED); + GetRollupIndexCapsAction.Request request = new GetRollupIndexCapsAction.Request(new String[]{index}, options); + return channel -> client.execute(GetRollupIndexCapsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "rollup_get_caps_action"; + } +} diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java index e63650397ab..e3a45dbd66b 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java @@ -11,11 +11,11 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction; import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.mockito.Mockito; import java.io.IOException; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java new file mode 100644 index 00000000000..2066d664996 --- /dev/null +++ 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.rollup.action; + + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.elasticsearch.xpack.core.rollup.RollupField; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps; +import org.mockito.Mockito; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.rollup.action.TransportGetRollupIndexCapsAction.getCapsByRollupIndex; +import static org.hamcrest.Matchers.equalTo; + + +public class GetRollupIndexCapsActionRequestTests extends AbstractStreamableTestCase { + + @Override + protected GetRollupIndexCapsAction.Request createTestInstance() { + if (randomBoolean()) { + return new GetRollupIndexCapsAction.Request(new String[]{MetaData.ALL}); + } + return new GetRollupIndexCapsAction.Request(new String[]{randomAlphaOfLengthBetween(1, 20)}); + } + + @Override + protected GetRollupIndexCapsAction.Request createBlankInstance() { + return new GetRollupIndexCapsAction.Request(); + } + + + public void testNoIndicesByRollup() { + ImmutableOpenMap indices = new ImmutableOpenMap.Builder().build(); + Map caps = 
getCapsByRollupIndex(Collections.singletonList("foo"), indices); + assertThat(caps.size(), equalTo(0)); + } + + public void testAllIndicesByRollupSingleRollup() throws IOException { + int num = randomIntBetween(1,5); + ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); + int indexCounter = 0; + for (int j = 0; j < 5; j++) { + + Map jobs = new HashMap<>(num); + for (int i = 0; i < num; i++) { + String jobName = randomAlphaOfLength(10); + String indexName = Integer.toString(indexCounter); + indexCounter += 1; + jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName).setRollupIndex("foo").build()); + } + + MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, + Collections.singletonMap("_meta", + Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(1); + mappings.put(RollupField.TYPE_NAME, mappingMeta); + IndexMetaData meta = Mockito.mock(IndexMetaData.class); + Mockito.when(meta.getMappings()).thenReturn(mappings.build()); + indices.put("foo", meta); + } + + Map caps = getCapsByRollupIndex(Collections.singletonList("foo"), + indices.build()); + assertThat(caps.size(), equalTo(1)); + } + + public void testAllIndicesByRollupManyRollup() throws IOException { + ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); + int indexCounter = 0; + for (int j = 0; j < 5; j++) { + + Map jobs = new HashMap<>(1); + String jobName = randomAlphaOfLength(10); + String indexName = Integer.toString(indexCounter); + indexCounter += 1; + jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName) + .setIndexPattern(indexName) + .setRollupIndex("rollup_" + indexName).build()); + + + MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, + Collections.singletonMap("_meta", + Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + + 
ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(1); + mappings.put(RollupField.TYPE_NAME, mappingMeta); + IndexMetaData meta = Mockito.mock(IndexMetaData.class); + Mockito.when(meta.getMappings()).thenReturn(mappings.build()); + indices.put("rollup_" + indexName, meta); + } + + Map caps = getCapsByRollupIndex(Arrays.asList(indices.keys().toArray(String.class)), indices.build()); + assertThat(caps.size(), equalTo(5)); + } + + + public void testOneIndexByRollupManyRollup() throws IOException { + ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); + int indexCounter = 0; + for (int j = 0; j < 5; j++) { + + Map jobs = new HashMap<>(1); + String jobName = randomAlphaOfLength(10); + String indexName = Integer.toString(indexCounter); + indexCounter += 1; + jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName) + .setIndexPattern("foo_" + indexName) + .setRollupIndex("rollup_" + indexName).build()); + + MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, + Collections.singletonMap("_meta", + Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(1); + mappings.put(RollupField.TYPE_NAME, mappingMeta); + IndexMetaData meta = Mockito.mock(IndexMetaData.class); + Mockito.when(meta.getMappings()).thenReturn(mappings.build()); + indices.put("rollup_" + indexName, meta); + } + + Map caps = getCapsByRollupIndex(Collections.singletonList("rollup_1"), indices.build()); + assertThat(caps.size(), equalTo(1)); + assertThat(caps.get("rollup_1").getIndexName(), equalTo("rollup_1")); + assertThat(caps.get("rollup_1").getJobCaps().size(), equalTo(1)); + } + + public void testOneIndexByRollupOneRollup() throws IOException { + ImmutableOpenMap.Builder indices = new ImmutableOpenMap.Builder<>(5); + int indexCounter = 0; + for (int j = 0; j < 5; j++) { + + Map jobs = new HashMap<>(1); + String jobName = 
randomAlphaOfLength(10); + String indexName = Integer.toString(indexCounter); + indexCounter += 1; + jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName) + .setIndexPattern("foo_" + indexName) + .setRollupIndex("rollup_foo").build()); + + MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, + Collections.singletonMap(RollupField.TYPE_NAME, + Collections.singletonMap("_meta", + Collections.singletonMap(RollupField.ROLLUP_META, jobs)))); + + ImmutableOpenMap.Builder mappings = ImmutableOpenMap.builder(1); + mappings.put(RollupField.TYPE_NAME, mappingMeta); + IndexMetaData meta = Mockito.mock(IndexMetaData.class); + Mockito.when(meta.getMappings()).thenReturn(mappings.build()); + indices.put("rollup_foo", meta); + } + + Map caps = getCapsByRollupIndex(Collections.singletonList("rollup_foo"), indices.build()); + assertThat(caps.size(), equalTo(1)); + assertThat(caps.get("rollup_foo").getIndexName(), equalTo("rollup_foo")); + assertThat(caps.get("rollup_foo").getJobCaps().size(), equalTo(1)); + } +} + + diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json new file mode 100644 index 00000000000..458311417d4 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json @@ -0,0 +1,17 @@ +{ + "xpack.rollup.get_rollup_index_caps": { + "documentation": "", + "methods": [ "GET" ], + "url": { + "path": "/{index}/_xpack/rollup/data", + "paths": [ "/{index}/_xpack/rollup/data" ], + "parts": { + "index": { + "type": "string", + "required": true, + "description": "The rollup index or index pattern to obtain rollup capabilities from." 
+ } + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml index 050e49bc4b4..f8bb401a772 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml @@ -22,6 +22,18 @@ setup: type: date value_field: type: integer + + - do: + indices.create: + index: foo3 + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -228,4 +240,3 @@ setup: - agg: "min" - agg: "max" - agg: "sum" - diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml new file mode 100644 index 00000000000..7a539edcc67 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -0,0 +1,363 @@ +setup: + - do: + indices.create: + index: foo + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer + + - do: + indices.create: + index: foo2 + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer + + - do: + indices.create: + index: foo3 + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + xpack.rollup.put_job: + id: foo + body: > + { + "index_pattern": "foo", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + +--- +"Verify one job caps by rollup index": + + - do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup" + + - match: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + +--- +"Verify two job caps by rollup index": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup" + + - match: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + - job_id: "foo2" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + + +--- +"Verify two different job caps by rollup index": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, 
i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup" + + - match: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + +--- +"Verify all job caps by rollup index": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + xpack.rollup.put_job: + id: foo3 + body: > + { + "index_pattern": "foo3", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + + - do: + xpack.rollup.get_rollup_index_caps: + index: "_all" + + - match: + $body: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + - job_id: "foo2" + rollup_index: "foo_rollup" + index_pattern: "foo2" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + foo_rollup2: + rollup_jobs: + - job_id: "foo3" + rollup_index: "foo_rollup2" + index_pattern: "foo3" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + +--- +"Verify index pattern": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + xpack.rollup.put_job: + id: foo3 + body: > + { + "index_pattern": "foo3", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + + - do: + xpack.rollup.get_rollup_index_caps: + index: "*_rollup2" + + - match: + $body: + foo_rollup2: + rollup_jobs: + - job_id: "foo3" + rollup_index: "foo_rollup2" + index_pattern: "foo3" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + From d596447f3dc186855af1e32dc06a75d06feea89c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 16 Jul 2018 17:44:19 -0400 Subject: [PATCH 048/260] Switch non-x-pack to new style requests (#32106) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes most of the calls not in X-Pack to their new versions. 
--- .../rest/Netty4BadRequestIT.java | 19 +++++++----- .../rest/Netty4HeadBodyIsEmptyIT.java | 30 ++++++++++++------- .../hdfs/HaHdfsFailoverTestSuiteIT.java | 20 +++++-------- .../VerifyVersionConstantsIT.java | 3 +- 4 files changed, 39 insertions(+), 33 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java index bc89558d3c6..17a62b3a440 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java @@ -19,7 +19,8 @@ package org.elasticsearch.rest; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Setting; @@ -43,7 +44,7 @@ import static org.hamcrest.Matchers.hasToString; public class Netty4BadRequestIT extends ESRestTestCase { public void testBadRequest() throws IOException { - final Response response = client().performRequest("GET", "/_nodes/settings", Collections.emptyMap()); + final Response response = client().performRequest(new Request("GET", "/_nodes/settings")); final ObjectPath objectPath = ObjectPath.createFromResponse(response); final Map map = objectPath.evaluate("nodes"); int maxMaxInitialLineLength = Integer.MIN_VALUE; @@ -77,9 +78,9 @@ public class Netty4BadRequestIT extends ESRestTestCase { } public void testInvalidParameterValue() throws IOException { - final ResponseException e = expectThrows( - ResponseException.class, - () -> client().performRequest("GET", "/_cluster/settings", Collections.singletonMap("pretty", "neither-true-nor-false"))); + final Request request = new Request("GET", "/_cluster/settings"); + request.addParameter("pretty", 
"neither-true-nor-false"); + final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); final Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(400)); final ObjectPath objectPath = ObjectPath.createFromResponse(response); @@ -89,9 +90,11 @@ public class Netty4BadRequestIT extends ESRestTestCase { } public void testInvalidHeaderValue() throws IOException { - final BasicHeader header = new BasicHeader("Content-Type", "\t"); - final ResponseException e = - expectThrows(ResponseException.class, () -> client().performRequest("GET", "/_cluster/settings", header)); + final Request request = new Request("GET", "/_cluster/settings"); + final RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Content-Type", "\t"); + request.setOptions(options); + final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); final Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(400)); final ObjectPath objectPath = ObjectPath.createFromResponse(response); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java index 20b18ebdadd..17b374ecb37 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -19,8 +19,7 @@ package org.elasticsearch.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -57,8 +56,9 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { 
builder.field("test", "test"); } builder.endObject(); - client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(), - new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/" + indexName + "/" + typeName + "/" + "1"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); } } @@ -109,8 +109,9 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { } builder.endObject(); - client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(Strings.toString(builder), - ContentType.APPLICATION_JSON)); + Request request = new Request("POST", "/_aliases"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0)); headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0)); } @@ -135,8 +136,9 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { } builder.endObject(); - client().performRequest("PUT", "/_template/template", emptyMap(), - new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/_template/template"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); headTestCase("/_template/template", emptyMap(), greaterThan(0)); } } @@ -164,8 +166,10 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { builder.endObject(); } builder.endObject(); - client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(Strings.toString(builder), - ContentType.APPLICATION_JSON)); + + Request request = new Request("PUT", "/test-no-source"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); createTestDoc("test-no-source", "test-no-source"); headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0)); } @@ -190,7 +194,11 @@ public class 
Netty4HeadBodyIsEmptyIT extends ESRestTestCase { final Map params, final int expectedStatusCode, final Matcher matcher) throws IOException { - Response response = client().performRequest("HEAD", url, params); + Request request = new Request("HEAD", url); + for (Map.Entry param : params.entrySet()) { + request.addParameter(param.getKey(), param.getValue()); + } + Response response = client().performRequest(request); assertEquals(expectedStatusCode, response.getStatusLine().getStatusCode()); assertThat(Integer.valueOf(response.getHeader("Content-Length")), matcher); assertNull("HEAD requests shouldn't have a response body but " + url + " did", response.getEntity()); diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java index ce4fe9b6d3f..0248576b573 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java @@ -21,16 +21,13 @@ package org.elasticsearch.repositories.hdfs; import java.io.IOException; import java.net.InetSocketAddress; -import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.BadFencingConfigurationException; @@ -42,9 +39,7 @@ import org.apache.hadoop.ha.protocolPB.HAServiceProtocolClientSideTranslatorPB; import org.apache.hadoop.hdfs.tools.DFSHAAdmin; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; -import 
org.apache.http.Header; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.io.PathUtils; @@ -58,8 +53,6 @@ public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { public void testHAFailoverWithRepository() throws Exception { RestClient client = client(); - Map emptyParams = Collections.emptyMap(); - Header contentHeader = new BasicHeader("Content-Type", "application/json"); String esKerberosPrincipal = System.getProperty("test.krb5.principal.es"); String hdfsKerberosPrincipal = System.getProperty("test.krb5.principal.hdfs"); @@ -106,7 +99,8 @@ public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { // Create repository { - Response response = client.performRequest("PUT", "/_snapshot/hdfs_ha_repo_read", emptyParams, new NStringEntity( + Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read"); + request.setJsonEntity( "{" + "\"type\":\"hdfs\"," + "\"settings\":{" + @@ -121,15 +115,15 @@ public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { "\"conf.dfs.client.failover.proxy.provider.ha-hdfs\": " + "\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\"" + "}" + - "}", - Charset.defaultCharset()), contentHeader); + "}"); + Response response = client.performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } // Get repository { - Response response = client.performRequest("GET", "/_snapshot/hdfs_ha_repo_read/_all", emptyParams); + Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -138,7 +132,7 @@ public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { // Get repository again { - Response response = client.performRequest("GET", 
"/_snapshot/hdfs_ha_repo_read/_all", emptyParams); + Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } } diff --git a/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java b/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java index a26237962bf..71fc9d09fe3 100644 --- a/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java +++ b/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.qa.verify_version_constants; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ObjectPath; @@ -32,7 +33,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class VerifyVersionConstantsIT extends ESRestTestCase { public void testLuceneVersionConstant() throws IOException, ParseException { - final Response response = client().performRequest("GET", "/"); + final Response response = client().performRequest(new Request("GET", "/")); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); final ObjectPath objectPath = ObjectPath.createFromResponse(response); final String elasticsearchVersionString = objectPath.evaluate("version.number").toString(); From f699cb9f5565bfe6825ae196969a45b4e6365d31 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 17 Jul 2018 00:26:01 +0200 Subject: [PATCH 049/260] Bypass highlight query terms extraction on empty fields (#32090) Dealing with empty fields in the highlight phase can slow down the query because the query terms extraction is done independently on each field. 
This change shortcuts the highlighting performed by the unified highlighter for fields that are not present in the document. In such cases there is nothing to highlight so we don't need to visit the query to build the highlight builder. --- .../search/fetch/subphase/highlight/UnifiedHighlighter.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 7f209ed0586..2c9d482cab0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -76,6 +76,9 @@ public class UnifiedHighlighter implements Highlighter { fieldValues = fieldValues.stream() .map((s) -> convertFieldValue(fieldType, s)) .collect(Collectors.toList()); + if (fieldValues.size() == 0) { + return null; + } final IndexSearcher searcher = new IndexSearcher(hitContext.reader()); final CustomUnifiedHighlighter highlighter; final String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR); From 15740d622971551e6595017be21dc6dc58a6ea84 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 16 Jul 2018 16:13:48 -0700 Subject: [PATCH 050/260] Painless: Move and Rename Several Methods in the lookup package (#32105) --- .../painless/AnalyzerCaster.java | 7 +- .../java/org/elasticsearch/painless/Def.java | 9 +- .../elasticsearch/painless/FunctionRef.java | 3 +- .../org/elasticsearch/painless/Locals.java | 5 +- .../painless/ScriptClassInfo.java | 3 +- .../painless/antlr/EnhancedPainlessLexer.java | 2 +- .../painless/lookup/PainlessLookup.java | 208 +------------ .../lookup/PainlessLookupBuilder.java | 69 +---- .../lookup/PainlessLookupUtility.java | 284 ++++++++++++++++++ .../painless/lookup/PainlessMethod.java | 14 +- .../painless/node/AExpression.java | 6 +-
.../painless/node/EAssignment.java | 4 +- .../elasticsearch/painless/node/EBinary.java | 37 ++- .../painless/node/ECallLocal.java | 4 +- .../painless/node/ECapturingFunctionRef.java | 11 +- .../elasticsearch/painless/node/ECast.java | 6 +- .../elasticsearch/painless/node/EComp.java | 28 +- .../painless/node/EFunctionRef.java | 10 +- .../painless/node/EInstanceof.java | 7 +- .../elasticsearch/painless/node/ELambda.java | 20 +- .../painless/node/EListInit.java | 6 +- .../elasticsearch/painless/node/EMapInit.java | 6 +- .../elasticsearch/painless/node/ENewObj.java | 4 +- .../elasticsearch/painless/node/ENull.java | 4 +- .../elasticsearch/painless/node/EUnary.java | 15 +- .../elasticsearch/painless/node/PBrace.java | 8 +- .../painless/node/PCallInvoke.java | 12 +- .../elasticsearch/painless/node/PField.java | 16 +- .../painless/node/PSubCallInvoke.java | 2 +- .../painless/node/PSubDefArray.java | 2 +- .../painless/node/PSubDefCall.java | 2 +- .../painless/node/PSubDefField.java | 2 +- .../painless/node/PSubField.java | 8 +- .../painless/node/PSubListShortcut.java | 4 +- .../painless/node/PSubMapShortcut.java | 4 +- .../painless/node/PSubShortcut.java | 2 +- .../elasticsearch/painless/node/SEach.java | 8 +- .../painless/node/SFunction.java | 11 +- .../painless/node/SSubEachArray.java | 6 +- .../painless/node/SSubEachIterable.java | 16 +- 40 files changed, 468 insertions(+), 407 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 457ec82a5e4..6cfc7ff6ebf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless; -import 
org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.def; import java.util.Objects; @@ -465,8 +465,9 @@ public final class AnalyzerCaster { (actual.isAssignableFrom(expected) && explicit)) { return PainlessCast.standard(actual, expected, explicit); } else { - throw location.createError(new ClassCastException( - "Cannot cast from [" + PainlessLookup.ClassToName(actual) + "] to [" + PainlessLookup.ClassToName(expected) + "].")); + throw location.createError(new ClassCastException("Cannot cast from " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(actual) + "] to " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index f006b57f46b..78db712d183 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -19,9 +19,10 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.lang.invoke.CallSite; @@ -302,7 +303,7 @@ public final class Def { nestedType, 0, DefBootstrap.REFERENCE, - PainlessLookup.ClassToName(interfaceType)); + PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceType)); filter = nested.dynamicInvoker(); } else { throw new AssertionError(); @@ -347,7 +348,7 @@ public final class Def { 
PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(clazz).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(clazz) + "], not a functional interface"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(clazz) + "], not a functional interface"); } int arity = interfaceMethod.arguments.size() + captures.length; final MethodHandle handle; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 6ab7292c7f6..9e72dc2c835 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; @@ -168,7 +169,7 @@ public class FunctionRef { PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; if (method == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); } // lookup requested method diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index bab1b7942d6..e797740fed1 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -19,10 +19,11 @@ package org.elasticsearch.painless; +import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import java.util.Arrays; import java.util.Collection; @@ -292,7 +293,7 @@ public final class Locals { @Override public String toString() { StringBuilder b = new StringBuilder(); - b.append("Variable[type=").append(PainlessLookup.ClassToName(clazz)); + b.append("Variable[type=").append(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz)); b.append(",name=").append(name); b.append(",slot=").append(slot); if (readonly) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index df43dba7b34..ff2061a9a4b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.lang.invoke.MethodType; import java.lang.reflect.Field; @@ -182,7 +183,7 @@ public class ScriptClassInfo { private static Class definitionTypeForClass(PainlessLookup painlessLookup, Class type, Function, String> unknownErrorMessageSource) { - type = PainlessLookup.ObjectClassTodefClass(type); + type = PainlessLookupUtility.javaObjectTypeToPainlessDefType(type); Class componentType = type; while 
(componentType.isArray()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java index e6c5da05798..f1db35636b4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java @@ -23,8 +23,8 @@ import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.misc.Interval; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.lookup.PainlessLookup; /** * A lexer that is customized for painless. It: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index feeaf4d34bc..6111d12317b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.lookup; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Map; @@ -30,154 +29,6 @@ import java.util.Map; */ public final class PainlessLookup { - public static Class getBoxedType(Class clazz) { - if (clazz == boolean.class) { - return Boolean.class; - } else if (clazz == byte.class) { - return Byte.class; - } else if (clazz == short.class) { - return Short.class; - } else if (clazz == char.class) { - return Character.class; - } else if (clazz == int.class) { - return Integer.class; - } else if (clazz == long.class) { - return Long.class; - } else if (clazz == float.class) { - return 
Float.class; - } else if (clazz == double.class) { - return Double.class; - } - - return clazz; - } - - public static Class getUnboxedype(Class clazz) { - if (clazz == Boolean.class) { - return boolean.class; - } else if (clazz == Byte.class) { - return byte.class; - } else if (clazz == Short.class) { - return short.class; - } else if (clazz == Character.class) { - return char.class; - } else if (clazz == Integer.class) { - return int.class; - } else if (clazz == Long.class) { - return long.class; - } else if (clazz == Float.class) { - return float.class; - } else if (clazz == Double.class) { - return double.class; - } - - return clazz; - } - - public static boolean isConstantType(Class clazz) { - return clazz == boolean.class || - clazz == byte.class || - clazz == short.class || - clazz == char.class || - clazz == int.class || - clazz == long.class || - clazz == float.class || - clazz == double.class || - clazz == String.class; - } - - public Class getClassFromBinaryName(String painlessType) { - return painlessTypesToJavaClasses.get(painlessType.replace('$', '.')); - } - - public static Class ObjectClassTodefClass(Class clazz) { - if (clazz.isArray()) { - Class component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == Object.class) { - char[] braces = new char[dimensions]; - Arrays.fill(braces, '['); - - String descriptor = new String(braces) + org.objectweb.asm.Type.getType(def.class).getDescriptor(); - org.objectweb.asm.Type type = org.objectweb.asm.Type.getType(descriptor); - - try { - return Class.forName(type.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalStateException("internal error", exception); - } - } - } else if (clazz == Object.class) { - return def.class; - } - - return clazz; - } - - public static Class defClassToObjectClass(Class clazz) { - if (clazz.isArray()) { - Class 
component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == def.class) { - char[] braces = new char[dimensions]; - Arrays.fill(braces, '['); - - String descriptor = new String(braces) + org.objectweb.asm.Type.getType(Object.class).getDescriptor(); - org.objectweb.asm.Type type = org.objectweb.asm.Type.getType(descriptor); - - try { - return Class.forName(type.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalStateException("internal error", exception); - } - } - } else if (clazz == def.class) { - return Object.class; - } - - return clazz; - } - - public static String ClassToName(Class clazz) { - if (clazz.isLocalClass() || clazz.isAnonymousClass()) { - return null; - } else if (clazz.isArray()) { - Class component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == def.class) { - StringBuilder builder = new StringBuilder(def.class.getSimpleName()); - - for (int dimension = 0; dimension < dimensions; dimension++) { - builder.append("[]"); - } - - return builder.toString(); - } - } else if (clazz == def.class) { - return def.class.getSimpleName(); - } - - return clazz.getCanonicalName().replace('$', '.'); - } - public Collection getStructs() { return javaClassesToPainlessStructs.values(); } @@ -190,6 +41,10 @@ public final class PainlessLookup { this.javaClassesToPainlessStructs = Collections.unmodifiableMap(javaClassesToPainlessStructs); } + public Class getClassFromBinaryName(String painlessType) { + return painlessTypesToJavaClasses.get(painlessType.replace('$', '.')); + } + public boolean isSimplePainlessType(String painlessType) { return painlessTypesToJavaClasses.containsKey(painlessType); } @@ -199,59 +54,6 @@ public final class PainlessLookup { } public Class 
getJavaClassFromPainlessType(String painlessType) { - Class javaClass = painlessTypesToJavaClasses.get(painlessType); - - if (javaClass != null) { - return javaClass; - } - int arrayDimensions = 0; - int arrayIndex = painlessType.indexOf('['); - - if (arrayIndex != -1) { - int length = painlessType.length(); - - while (arrayIndex < length) { - if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') { - ++arrayDimensions; - } else { - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]."); - } - } - - painlessType = painlessType.substring(0, painlessType.indexOf('[')); - javaClass = painlessTypesToJavaClasses.get(painlessType); - - char braces[] = new char[arrayDimensions]; - Arrays.fill(braces, '['); - String descriptor = new String(braces); - - if (javaClass == boolean.class) { - descriptor += "Z"; - } else if (javaClass == byte.class) { - descriptor += "B"; - } else if (javaClass == short.class) { - descriptor += "S"; - } else if (javaClass == char.class) { - descriptor += "C"; - } else if (javaClass == int.class) { - descriptor += "I"; - } else if (javaClass == long.class) { - descriptor += "J"; - } else if (javaClass == float.class) { - descriptor += "F"; - } else if (javaClass == double.class) { - descriptor += "D"; - } else { - descriptor += "L" + javaClass.getName() + ";"; - } - - try { - return Class.forName(descriptor); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe); - } - } - - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]"); + return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 
1dadce318d6..9a5e08d65a7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -30,7 +30,6 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -279,7 +278,7 @@ public class PainlessLookupBuilder { Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount] = PainlessLookup.defClassToObjectClass(painlessParameterClass); + javaClassParameters[parameterCount] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and constructor parameters " + @@ -364,7 +363,8 @@ public class PainlessLookupBuilder { Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount + augmentedOffset] = PainlessLookup.defClassToObjectClass(painlessParameterClass); + javaClassParameters[parameterCount + augmentedOffset] = + PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + @@ -393,7 +393,7 @@ public class PainlessLookupBuilder { "and parameters " + 
whitelistMethod.painlessParameterTypeNames, iae); } - if (javaMethod.getReturnType() != PainlessLookup.defClassToObjectClass(painlessReturnClass)) { + if (javaMethod.getReturnType() != PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessReturnClass)) { throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " + "does not match the return type class [" + javaMethod.getReturnType() + "] for the " + "method with name [" + whitelistMethod.javaMethodName + "] " + @@ -711,64 +711,11 @@ public class PainlessLookupBuilder { return painless; } - public Class getJavaClassFromPainlessType(String painlessType) { - Class javaClass = painlessTypesToJavaClasses.get(painlessType); - - if (javaClass != null) { - return javaClass; - } - int arrayDimensions = 0; - int arrayIndex = painlessType.indexOf('['); - - if (arrayIndex != -1) { - int length = painlessType.length(); - - while (arrayIndex < length) { - if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') { - ++arrayDimensions; - } else { - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]."); - } - } - - painlessType = painlessType.substring(0, painlessType.indexOf('[')); - javaClass = painlessTypesToJavaClasses.get(painlessType); - - char braces[] = new char[arrayDimensions]; - Arrays.fill(braces, '['); - String descriptor = new String(braces); - - if (javaClass == boolean.class) { - descriptor += "Z"; - } else if (javaClass == byte.class) { - descriptor += "B"; - } else if (javaClass == short.class) { - descriptor += "S"; - } else if (javaClass == char.class) { - descriptor += "C"; - } else if (javaClass == int.class) { - descriptor += "I"; - } else if (javaClass == long.class) { - descriptor += "J"; - } else if (javaClass == float.class) { - descriptor += "F"; - } else if (javaClass == double.class) { - descriptor += "D"; - } else { - descriptor += "L" + javaClass.getName() + ";"; - } - - 
try { - return Class.forName(descriptor); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe); - } - } - - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]"); - } - public PainlessLookup build() { return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs); } + + public Class getJavaClassFromPainlessType(String painlessType) { + return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java new file mode 100644 index 00000000000..d1f3ee4ece3 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -0,0 +1,284 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.lookup; + +import org.objectweb.asm.Type; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; + +/** + * This class contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within + * Painless for conversion between type names and types along with some other various utility methods. + * + * The following terminology is used for variable names throughout the lookup package: + * + * - javaClass (Class) - a java class including def and excluding array type java classes + * - javaClassName (String) - the fully qualified java class name for a javaClass + * - painlessClassName (String) - the fully qualified painless name or imported painless name for a painlessClass + * - anyClassName (String) - either a javaClassName or a painlessClassName + * - javaType (Class) - a java class excluding def and array type java classes + * - painlessType (Class) - a java class including def and array type java classes + * - javaTypeName (String) - the fully qualified java Type name for a javaType + * - painlessTypeName (String) - the fully qualified painless name or imported painless name for a painlessType + * - anyTypeName (String) - either a javaTypeName or a painlessTypeName + * - painlessClass (PainlessClass) - a painless class object + * + * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. + * If the variable name is the same for asm, java, and painless, no prefix is used. 
+ */ +public final class PainlessLookupUtility { + + public static Class javaObjectTypeToPainlessDefType(Class javaType) { + if (javaType.isArray()) { + Class javaTypeComponent = javaType.getComponentType(); + int arrayDimensions = 1; + + while (javaTypeComponent.isArray()) { + javaTypeComponent = javaTypeComponent.getComponentType(); + ++arrayDimensions; + } + + if (javaTypeComponent == Object.class) { + char[] asmDescriptorBraces = new char[arrayDimensions]; + Arrays.fill(asmDescriptorBraces, '['); + + String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(def.class).getDescriptor(); + Type asmType = Type.getType(asmDescriptor); + + try { + return Class.forName(asmType.getInternalName().replace('/', '.')); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("internal error", cnfe); + } + } + } else if (javaType == Object.class) { + return def.class; + } + + return javaType; + } + + public static Class painlessDefTypeToJavaObjectType(Class painlessType) { + if (painlessType.isArray()) { + Class painlessTypeComponent = painlessType.getComponentType(); + int arrayDimensions = 1; + + while (painlessTypeComponent.isArray()) { + painlessTypeComponent = painlessTypeComponent.getComponentType(); + ++arrayDimensions; + } + + if (painlessTypeComponent == def.class) { + char[] asmDescriptorBraces = new char[arrayDimensions]; + Arrays.fill(asmDescriptorBraces, '['); + + String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(Object.class).getDescriptor(); + Type asmType = Type.getType(asmDescriptor); + + try { + return Class.forName(asmType.getInternalName().replace('/', '.')); + } catch (ClassNotFoundException exception) { + throw new IllegalStateException("internal error", exception); + } + } + } else if (painlessType == def.class) { + return Object.class; + } + + return painlessType; + } + + public static String anyTypeNameToPainlessTypeName(String anyTypeName) { + return anyTypeName.replace(def.class.getName(), 
DEF_PAINLESS_CLASS_NAME).replace('$', '.'); + } + + public static String anyTypeToPainlessTypeName(Class anyType) { + if (anyType.isLocalClass() || anyType.isAnonymousClass()) { + return null; + } else if (anyType.isArray()) { + Class anyTypeComponent = anyType.getComponentType(); + int arrayDimensions = 1; + + while (anyTypeComponent.isArray()) { + anyTypeComponent = anyTypeComponent.getComponentType(); + ++arrayDimensions; + } + + if (anyTypeComponent == def.class) { + StringBuilder painlessDefTypeNameArrayBuilder = new StringBuilder(DEF_PAINLESS_CLASS_NAME); + + for (int dimension = 0; dimension < arrayDimensions; dimension++) { + painlessDefTypeNameArrayBuilder.append("[]"); + } + + return painlessDefTypeNameArrayBuilder.toString(); + } + } else if (anyType == def.class) { + return DEF_PAINLESS_CLASS_NAME; + } + + return anyType.getCanonicalName().replace('$', '.'); + } + + public static Class painlessTypeNameToPainlessType(String painlessTypeName, Map> painlessClassNamesToJavaClasses) { + Class javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName); + + if (javaClass != null) { + return javaClass; + } + + int arrayDimensions = 0; + int arrayIndex = painlessTypeName.indexOf('['); + + if (arrayIndex != -1) { + int painlessTypeNameLength = painlessTypeName.length(); + + while (arrayIndex < painlessTypeNameLength) { + if (painlessTypeName.charAt(arrayIndex) == '[' && + ++arrayIndex < painlessTypeNameLength && + painlessTypeName.charAt(arrayIndex++) == ']') { + ++arrayDimensions; + } else { + throw new IllegalArgumentException("invalid painless type [" + painlessTypeName + "]."); + } + } + + painlessTypeName = painlessTypeName.substring(0, painlessTypeName.indexOf('[')); + javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName); + + char javaDescriptorBraces[] = new char[arrayDimensions]; + Arrays.fill(javaDescriptorBraces, '['); + String javaDescriptor = new String(javaDescriptorBraces); + + if (javaClass == boolean.class) { + javaDescriptor 
+= "Z"; + } else if (javaClass == byte.class) { + javaDescriptor += "B"; + } else if (javaClass == short.class) { + javaDescriptor += "S"; + } else if (javaClass == char.class) { + javaDescriptor += "C"; + } else if (javaClass == int.class) { + javaDescriptor += "I"; + } else if (javaClass == long.class) { + javaDescriptor += "J"; + } else if (javaClass == float.class) { + javaDescriptor += "F"; + } else if (javaClass == double.class) { + javaDescriptor += "D"; + } else { + javaDescriptor += "L" + javaClass.getName() + ";"; + } + + try { + return Class.forName(javaDescriptor); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("painless type [" + painlessTypeName + "] not found", cnfe); + } + } + + throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); + } + + public static void validatePainlessType(Class painlessType, Collection> javaClasses) { + String painlessTypeName = anyTypeNameToPainlessTypeName(painlessType.getName()); + + while (painlessType.getComponentType() != null) { + painlessType = painlessType.getComponentType(); + } + + if (javaClasses.contains(painlessType) == false) { + throw new IllegalStateException("painless type [" + painlessTypeName + "] not found"); + } + } + + public static String buildPainlessMethodKey(String methodName, int methodArity) { + return methodName + "/" + methodArity; + } + + public static String buildPainlessFieldKey(String fieldName) { + return fieldName; + } + + public static Class getBoxedAnyType(Class anyType) { + if (anyType == boolean.class) { + return Boolean.class; + } else if (anyType == byte.class) { + return Byte.class; + } else if (anyType == short.class) { + return Short.class; + } else if (anyType == char.class) { + return Character.class; + } else if (anyType == int.class) { + return Integer.class; + } else if (anyType == long.class) { + return Long.class; + } else if (anyType == float.class) { + return Float.class; + } else if (anyType == 
double.class) { + return Double.class; + } + + return anyType; + } + + public static Class getUnboxedAnyType(Class anyType) { + if (anyType == Boolean.class) { + return boolean.class; + } else if (anyType == Byte.class) { + return byte.class; + } else if (anyType == Short.class) { + return short.class; + } else if (anyType == Character.class) { + return char.class; + } else if (anyType == Integer.class) { + return int.class; + } else if (anyType == Long.class) { + return long.class; + } else if (anyType == Float.class) { + return float.class; + } else if (anyType == Double.class) { + return double.class; + } + + return anyType; + } + + public static boolean isAnyTypeConstant(Class anyType) { + return anyType == boolean.class || + anyType == byte.class || + anyType == short.class || + anyType == char.class || + anyType == int.class || + anyType == long.class || + anyType == float.class || + anyType == double.class || + anyType == String.class; + } + + public static final String DEF_PAINLESS_CLASS_NAME = def.class.getSimpleName(); + public static final String CONSTRUCTOR_ANY_NAME = ""; + + private PainlessLookupUtility() { + + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java index e87e1d4bf38..8d8a7f691fe 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -69,21 +69,21 @@ public class PainlessMethod { params = new Class[1 + arguments.size()]; params[0] = augmentation; for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } - returnValue = PainlessLookup.defClassToObjectClass(rtn); + returnValue = 
PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); } else if (Modifier.isStatic(modifiers)) { // static method: straightforward copy params = new Class[arguments.size()]; for (int i = 0; i < arguments.size(); i++) { - params[i] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } - returnValue = PainlessLookup.defClassToObjectClass(rtn); + returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); } else if ("".equals(name)) { // constructor: returns the owner class params = new Class[arguments.size()]; for (int i = 0; i < arguments.size(); i++) { - params[i] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } returnValue = owner.clazz; } else { @@ -91,9 +91,9 @@ public class PainlessMethod { params = new Class[1 + arguments.size()]; params[0] = owner.clazz; for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } - returnValue = PainlessLookup.defClassToObjectClass(rtn); + returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); } return MethodType.methodType(returnValue, params); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index 0dfcf4d4093..dd813f73c3d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -20,10 +20,10 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import 
org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.util.Objects; @@ -157,7 +157,7 @@ public abstract class AExpression extends ANode { return ecast; } else { - if (PainlessLookup.isConstantType(expected)) { + if (PainlessLookupUtility.isAnyTypeConstant(expected)) { // For the case where a cast is required, a constant is set, // and the constant can be immediately cast to the expected type. // An EConstant replaces this node with the constant cast appropriately diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java index a0a29ed59dd..584e5df6342 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -22,13 +22,13 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessCast; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.def; import java.util.ArrayList; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 422300072dc..65776ca76f1 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -21,14 +21,14 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.util.Objects; import java.util.Set; @@ -106,7 +106,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply multiply [*] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -148,7 +149,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply divide [/] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -195,7 +197,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply remainder [%] to types " + - "[" + 
PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -242,7 +245,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply add [+] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -300,7 +304,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply subtract [-] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -358,7 +363,8 @@ public final class EBinary extends AExpression { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply left shift [<<] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -405,7 +411,8 @@ public final class EBinary extends AExpression { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply right shift [>>] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + 
PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -455,7 +462,8 @@ public final class EBinary extends AExpression { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply unsigned shift [>>>] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (lhspromote == def.class || rhspromote == def.class) { @@ -498,7 +506,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply and [&] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -537,7 +546,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply xor [^] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -577,7 +587,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply or [|] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" 
+ PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index ede1a2a6b9c..dfed0ca47b4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index c0345b6308c..e8ad9d85ed6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -21,14 +21,14 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; 
import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; @@ -69,7 +69,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda defPointer = "D" + variable + "." + call + ",1"; } else { // typed implementation - defPointer = "S" + PainlessLookup.ClassToName(captured.clazz) + "." + call + ",1"; + defPointer = "S" + PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz) + "." + call + ",1"; } actual = String.class; } else { @@ -77,7 +77,8 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef(locals.getPainlessLookup(), expected, PainlessLookup.ClassToName(captured.clazz), call, 1); + ref = new FunctionRef( + locals.getPainlessLookup(), expected, PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { @@ -109,7 +110,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // typed interface, dynamic implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); Type methodType = Type.getMethodType(MethodWriter.getType(expected), MethodWriter.getType(captured.clazz)); - writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookup.ClassToName(expected)); + writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookupUtility.anyTypeToPainlessTypeName(expected)); } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index 55a9dbf71c8..b0451b685b5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.util.Objects; import java.util.Set; @@ -63,6 +63,6 @@ final class ECast extends AExpression { @Override public String toString() { - return singleLineToString(PainlessLookup.ClassToName(cast.to), child); + return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(cast.to), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index 806204d051a..edf18f501bc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -21,13 +21,13 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import 
org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Type; @@ -93,7 +93,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply equals [==] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -142,7 +143,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference equals [===] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -182,7 +184,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply not equals [!=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -231,7 +234,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + 
PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -271,7 +275,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -310,7 +315,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than [>] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -349,7 +355,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -388,7 +395,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than [>=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + 
PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 82c24e27c5d..d4eddb059a8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -20,14 +20,14 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.Objects; @@ -69,12 +69,12 @@ public final class EFunctionRef extends AExpression implements ILambda { PainlessMethod interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); } PainlessMethod delegateMethod = locals.getMethod(new PainlessMethodKey(call, 
interfaceMethod.arguments.size())); if (delegateMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(expected) + "], function not found"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], function not found"); } ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index a3835cbc213..05564a2952e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.util.Objects; import java.util.Set; @@ -64,7 +64,8 @@ public final class EInstanceof extends AExpression { } // map to wrapped type for primitive types - resolvedType = clazz.isPrimitive() ? PainlessLookup.getBoxedType(clazz) : PainlessLookup.defClassToObjectClass(clazz); + resolvedType = clazz.isPrimitive() ? PainlessLookupUtility.getBoxedAnyType(clazz) : + PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); // analyze and cast the expression expression.analyze(locals); @@ -75,7 +76,7 @@ public final class EInstanceof extends AExpression { primitiveExpression = expression.actual.isPrimitive(); // map to wrapped type for primitive types expressionType = expression.actual.isPrimitive() ? 
- PainlessLookup.getBoxedType(expression.actual) : PainlessLookup.defClassToObjectClass(clazz); + PainlessLookupUtility.getBoxedAnyType(expression.actual) : PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 8977f4f0ef3..8e8d164b03d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -20,15 +20,15 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.objectweb.asm.Opcodes; @@ -122,13 +122,13 @@ public final class ELambda extends AExpression implements ILambda { // we know the method statically, infer return type and any unknown/def types interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { - throw createError(new IllegalArgumentException("Cannot pass lambda to [" + PainlessLookup.ClassToName(expected) + - "], not a functional interface")); + throw createError(new IllegalArgumentException("Cannot pass lambda to " + + "[" + 
PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.arguments.size() != paramTypeStrs.size()) throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + - "] in [" + PainlessLookup.ClassToName(expected) + "]"); + "] in [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "]"); // for method invocation, its allowed to ignore the return value if (interfaceMethod.rtn == void.class) { returnType = def.class; @@ -140,7 +140,7 @@ public final class ELambda extends AExpression implements ILambda { for (int i = 0; i < paramTypeStrs.size(); i++) { String paramType = paramTypeStrs.get(i); if (paramType == null) { - actualParamTypeStrs.add(PainlessLookup.ClassToName(interfaceMethod.arguments.get(i))); + actualParamTypeStrs.add(PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceMethod.arguments.get(i))); } else { actualParamTypeStrs.add(paramType); } @@ -162,15 +162,15 @@ public final class ELambda extends AExpression implements ILambda { List paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size()); List paramNames = new ArrayList<>(captures.size() + paramNameStrs.size()); for (Variable var : captures) { - paramTypes.add(PainlessLookup.ClassToName(var.clazz)); + paramTypes.add(PainlessLookupUtility.anyTypeToPainlessTypeName(var.clazz)); paramNames.add(var.name); } paramTypes.addAll(actualParamTypeStrs); paramNames.addAll(paramNameStrs); // desugar lambda body into a synthetic method - desugared = new SFunction(reserved, location, PainlessLookup.ClassToName(returnType), name, - paramTypes, paramNames, statements, true); + desugared = new SFunction(reserved, location, PainlessLookupUtility.anyTypeToPainlessTypeName(returnType), name, + paramTypes, paramNames, statements, true); desugared.generateSignature(locals.getPainlessLookup()); 
desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 820cce685ed..90475419b32 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.ArrayList; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index b6c7fb80af9..c6474846d4c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import 
org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.HashMap; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 197e2fcb7fd..a780ea3e05b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 983819b6b2b..6bc5331cb1d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.Opcodes; import java.util.Set; @@ -53,7 +53,7 @@ public final class ENull extends 
AExpression { if (expected != null) { if (expected.isPrimitive()) { throw createError(new IllegalArgumentException( - "Cannot cast null to a primitive type [" + PainlessLookup.ClassToName(expected) + "].")); + "Cannot cast null to a primitive type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); } actual = expected; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index 3a5102ebdc9..d34399db779 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -21,13 +21,13 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; @@ -93,7 +93,8 @@ public final class EUnary extends AExpression { promote = AnalyzerCaster.promoteNumeric(child.actual, false); if (promote == null) { - throw createError(new ClassCastException("Cannot apply not [~] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply not [~] to type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(child.actual) + "].")); } child.expected = promote; @@ -122,8 +123,8 @@ public final class EUnary extends AExpression { promote = 
AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { - throw createError( - new ClassCastException("Cannot apply positive [+] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply positive [+] to type " + + "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); } child.expected = promote; @@ -156,8 +157,8 @@ public final class EUnary extends AExpression { promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { - throw createError( - new ClassCastException("Cannot apply negative [-] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply negative [-] to type " + + "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); } child.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index 5b282abdce9..c45107a37ac 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.util.List; import java.util.Map; @@ -67,8 +67,8 @@ public final class PBrace extends AStoreable { } else if (List.class.isAssignableFrom(prefix.actual)) { sub = new PSubListShortcut(location, 
locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); } else { - throw createError( - new IllegalArgumentException("Illegal array access on type [" + PainlessLookup.ClassToName(prefix.actual) + "].")); + throw createError(new IllegalArgumentException("Illegal array access on type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); } sub.write = write; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index f23ae9f1887..445c053347e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -19,15 +19,15 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.PainlessClass; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.List; import java.util.Objects; @@ -74,7 +74,7 @@ public final class PCallInvoke extends AExpression { PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); if (prefix.actual.isPrimitive()) { - struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookup.getBoxedType(prefix.actual)); + 
struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.getBoxedAnyType(prefix.actual)); } PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 78a18b91ab2..3f2f8879564 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -19,16 +19,16 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessField; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.List; import java.util.Map; @@ -64,7 +64,7 @@ public final class PField extends AStoreable { prefix = prefix.cast(locals); if (prefix.actual.isArray()) { - sub = new PSubArrayLength(location, PainlessLookup.ClassToName(prefix.actual), value); + sub = new PSubArrayLength(location, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), value); } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { @@ -86,7 +86,7 @@ public final class PField extends AStoreable { new PainlessMethodKey("set" 
+ Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { - sub = new PSubShortcut(location, value, PainlessLookup.ClassToName(prefix.actual), getter, setter); + sub = new PSubShortcut(location, value, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), getter, setter); } else { EConstant index = new EConstant(location, value); index.analyze(locals); @@ -104,7 +104,7 @@ public final class PField extends AStoreable { if (sub == null) { throw createError(new IllegalArgumentException( - "Unknown field [" + value + "] for type [" + PainlessLookup.ClassToName(prefix.actual) + "].")); + "Unknown field [" + value + "] for type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java index 66ad0ecff1b..237efa61ffa 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; import java.util.List; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java index ccbc25db4f2..afad497dec7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java index a9021000e2d..7f4e253b409 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; import java.util.ArrayList; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java index 1c081c9422e..bf00d8d8599 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import 
org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.def; import java.util.Objects; import java.util.Set; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index d6c367cfeab..8eb154e745b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.lang.reflect.Modifier; import java.util.Objects; @@ -51,8 +51,8 @@ final class PSubField extends AStoreable { @Override void analyze(Locals locals) { if (write && Modifier.isFinal(field.modifiers)) { - throw createError(new IllegalArgumentException( - "Cannot write to read-only field [" + field.name + "] for type [" + PainlessLookup.ClassToName(field.clazz) + "].")); + throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.name + "] for type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(field.clazz) + "].")); } actual = field.clazz; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 5d881b30db2..0a3ab142ddc 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 4875d55cbeb..f71e2ac5d1f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java index 4b2910dbc01..eb5668c554c 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; import java.util.Set; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index c402d8982d8..de1a7062a24 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.util.Objects; import java.util.Set; @@ -84,8 +84,8 @@ public class SEach extends AStatement { } else if (expression.actual == def.class || Iterable.class.isAssignableFrom(expression.actual)) { sub = new SSubEachIterable(location, variable, expression, block); } else { - throw createError( - new IllegalArgumentException("Illegal for each type [" + PainlessLookup.ClassToName(expression.actual) + "].")); + throw createError(new IllegalArgumentException("Illegal for each type " + + "[" + 
PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); } sub.analyze(locals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 628bb1d32d5..1c801d509b5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -22,8 +22,6 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; import org.elasticsearch.painless.Def; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Parameter; @@ -31,6 +29,9 @@ import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.node.SSource.Reserved; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Handle; @@ -135,7 +136,7 @@ public final class SFunction extends AStatement { try { Class paramType = painlessLookup.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); - paramClasses[param] = PainlessLookup.defClassToObjectClass(paramType); + paramClasses[param] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(paramType); paramTypes.add(paramType); parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } catch (IllegalArgumentException exception) { @@ -144,8 +145,8 @@ public final class SFunction 
extends AStatement { } } - org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method( - name, MethodType.methodType(PainlessLookup.defClassToObjectClass(rtnType), paramClasses).toMethodDescriptorString()); + org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method(name, MethodType.methodType( + PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtnType), paramClasses).toMethodDescriptorString()); this.method = new PainlessMethod(name, null, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index 5db161b8002..fea8c8953b6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -20,13 +20,13 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -109,6 +109,6 @@ final class SSubEachArray extends AStatement { @Override public String toString() { - return singleLineToString(PainlessLookup.ClassToName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); } } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index cfc87536b6b..cec1297a4c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -21,16 +21,16 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessCast; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -81,8 +81,8 @@ final class SSubEachIterable extends AStatement { getPainlessStructFromJavaClass(expression.actual).methods.get(new PainlessMethodKey("iterator", 0)); if (method == null) { - throw createError(new IllegalArgumentException( - "Unable to create iterator for the type [" + PainlessLookup.ClassToName(expression.actual) + "].")); + throw createError(new IllegalArgumentException("Unable to create iterator for the type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); } } @@ -133,6 +133,6 @@ final class 
SSubEachIterable extends AStatement { @Override public String toString() { - return singleLineToString(PainlessLookup.ClassToName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); } } From 4b5071f2d08a2ba23eb6d84d47192453c6e68b11 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 17 Jul 2018 06:50:21 +0200 Subject: [PATCH 051/260] Add Index UUID to `/_stats` Response (#31871) * Add "uuid" field to each index's section in the `/_stats` response * closes #31791 --- .../rest-api-spec/test/indices.stats/10_index.yml | 2 ++ .../action/admin/indices/stats/IndexStats.java | 9 ++++++++- .../admin/indices/stats/IndicesStatsResponse.java | 14 +++++++++----- .../admin/indices/stats/IndicesStatsTests.java | 8 +++++++- .../indices/IndicesStatsMonitoringDocTests.java | 2 +- 5 files changed, 27 insertions(+), 8 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml index b70fac8cf04..42847b05cd1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -47,7 +47,9 @@ setup: - match: { _shards.total: 18 } - is_true: _all - is_true: indices.test1 + - is_true: indices.test1.uuid - is_true: indices.test2 + - is_true: indices.test2.uuid --- diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java index 7903a340628..d8480519e5d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java @@ -29,10 +29,13 @@ public class IndexStats implements Iterable { private final 
String index; + private final String uuid; + private final ShardStats shards[]; - public IndexStats(String index, ShardStats[] shards) { + public IndexStats(String index, String uuid, ShardStats[] shards) { this.index = index; + this.uuid = uuid; this.shards = shards; } @@ -40,6 +43,10 @@ public class IndexStats implements Iterable { return this.index; } + public String getUuid() { + return uuid; + } + public ShardStats[] getShards() { return this.shards; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 7406dc4f2d1..cc563948160 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; import java.io.IOException; import java.util.ArrayList; @@ -84,19 +85,22 @@ public class IndicesStatsResponse extends BroadcastResponse { } Map indicesStats = new HashMap<>(); - Set indices = new HashSet<>(); + Set indices = new HashSet<>(); for (ShardStats shard : shards) { - indices.add(shard.getShardRouting().getIndexName()); + indices.add(shard.getShardRouting().index()); } - for (String indexName : indices) { + for (Index index : indices) { List shards = new ArrayList<>(); + String indexName = index.getName(); for (ShardStats shard : this.shards) { if (shard.getShardRouting().getIndexName().equals(indexName)) { shards.add(shard); } } - indicesStats.put(indexName, new IndexStats(indexName, shards.toArray(new ShardStats[shards.size()]))); + indicesStats.put( + indexName, new IndexStats(indexName, index.getUUID(), 
shards.toArray(new ShardStats[shards.size()])) + ); } this.indicesStats = indicesStats; return indicesStats; @@ -169,7 +173,7 @@ public class IndicesStatsResponse extends BroadcastResponse { builder.startObject(Fields.INDICES); for (IndexStats indexStats : getIndices().values()) { builder.startObject(indexStats.getIndex()); - + builder.field("uuid", indexStats.getUuid()); builder.startObject("primaries"); indexStats.getPrimaries().toXContent(builder, params); builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 26785d2c870..264a92137be 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -153,6 +153,13 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { assertEquals(0, common.refresh.getListeners()); } + @SuppressWarnings("unchecked") + public void testUuidOnRootStatsIndices() { + String uuid = createIndex("test").indexUUID(); + IndicesStatsResponse rsp = client().admin().indices().prepareStats().get(); + assertEquals(uuid, rsp.getIndex("test").getUuid()); + } + /** * Gives access to package private IndicesStatsResponse constructor for test purpose. 
**/ @@ -160,5 +167,4 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { int failedShards, List shardFailures) { return new IndicesStatsResponse(shards, totalShards, successfulShards, failedShards, shardFailures); } - } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java index 86fa89f4c01..66b41d40943 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java @@ -45,7 +45,7 @@ public class IndicesStatsMonitoringDocTests extends BaseFilteredMonitoringDocTes @Before public void setUp() throws Exception { super.setUp(); - indicesStats = Collections.singletonList(new IndexStats("index-0", new ShardStats[] { + indicesStats = Collections.singletonList(new IndexStats("index-0", "dcvO5uZATE-EhIKc3tk9Bg", new ShardStats[] { // Primaries new ShardStats(mockShardRouting(true), mockShardPath(), mockCommonStats(), null, null), new ShardStats(mockShardRouting(true), mockShardPath(), mockCommonStats(), null, null), From 6717df3c2deb35ce367abedd556e337d3df7c41f Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Tue, 17 Jul 2018 15:43:03 +1000 Subject: [PATCH 052/260] [Test] Modify assert statement for ssl handshake (#32072) There have been changes in error messages for `SSLHandshakeException`. This has caused a couple of failures in our tests. This commit modifies test verification to assert on exception type of class `SSLHandshakeException`. There was another issue in Java11 which caused NPE. The bug has now been fixed on Java11 - early access build 22. 
Bug Ref: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=8206355 Enable the skipped tests due to this bug. Closes #31940 --- .../elasticsearch/client/RestClientBuilderIntegTests.java | 8 +++----- .../smoketest/SmokeTestMonitoringWithSecurityIT.java | 1 - 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 30359ea90f6..0d1c3ffd6b8 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -30,6 +30,7 @@ import org.junit.BeforeClass; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.TrustManagerFactory; import java.io.IOException; import java.io.InputStream; @@ -37,7 +38,7 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.security.KeyStore; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -72,9 +73,6 @@ public class RestClientBuilderIntegTests extends RestClientTestCase { } public void testBuilderUsesDefaultSSLContext() throws Exception { - assumeFalse("Due to bug inside jdk, this test can't momentarily run with java 11. 
" + - "See: https://github.com/elastic/elasticsearch/issues/31940", - System.getProperty("java.version").contains("11")); final SSLContext defaultSSLContext = SSLContext.getDefault(); try { try (RestClient client = buildRestClient()) { @@ -82,7 +80,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase { client.performRequest(new Request("GET", "/")); fail("connection should have been rejected due to SSL handshake"); } catch (Exception e) { - assertThat(e.getMessage(), containsString("General SSLEngine problem")); + assertThat(e, instanceOf(SSLHandshakeException.class)); } } diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index 52c2a7dfa2d..c427d8bf32c 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -127,7 +127,6 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase { return monitoringUsage.get().getExporters().isEmpty() == false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31940") public void testHTTPExporterWithSSL() throws Exception { // Ensures that the exporter is actually on assertBusy(() -> assertThat("[_http] exporter is not defined", getMonitoringUsageExportersDefined(), is(true))); From 61486680a25fc16299f666385970fd7dc310271c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Jul 2018 09:04:41 +0200 Subject: [PATCH 053/260] Add exclusion option to `keep_types` token filter (#32012) Currently the `keep_types` token filter includes all token types specified using its `types` parameter. 
Lucenes TypeTokenFilter also provides a second mode where instead of keeping the specified tokens (include) they are filtered out (exclude). This change exposes this option as a new `mode` parameter that can either take the values `include` (the default, if not specified) or `exclude`. Closes #29277 --- .../keep-types-tokenfilter.asciidoc | 74 ++++++++++++++++++- .../common/KeepTypesFilterFactory.java | 36 +++++++-- .../common/KeepTypesFilterFactoryTests.java | 48 ++++++++++-- 3 files changed, 142 insertions(+), 16 deletions(-) diff --git a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc index afaf4f8fa8c..05687f86691 100644 --- a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc @@ -8,8 +8,9 @@ contained in a predefined set. [float] === Options [horizontal] -types:: a list of types to keep - +types:: a list of types to include (default mode) or exclude +mode:: if set to `include` (default) the specified token types will be kept, +if set to `exclude` the specified token types will be removed from the stream [float] === Settings example @@ -53,7 +54,7 @@ POST /keep_types_example/_analyze // CONSOLE // TEST[continued] -And it'd respond: +The response will be: [source,js] -------------------------------------------------- @@ -72,3 +73,70 @@ And it'd respond: // TESTRESPONSE Note how only the `` token is in the output. 
+ +=== Exclude mode settings example + +If the `mode` parameter is set to `exclude` like in the following example: + +[source,js] +-------------------------------------------------- +PUT /keep_types_exclude_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : ["standard", "lowercase", "remove_numbers"] + } + }, + "filter" : { + "remove_numbers" : { + "type" : "keep_types", + "mode" : "exclude", + "types" : [ "" ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +And we test it like: + +[source,js] +-------------------------------------------------- +POST /keep_types_exclude_example/_analyze +{ + "analyzer" : "my_analyzer", + "text" : "hello 101 world" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +The response will be: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "hello", + "start_offset": 0, + "end_offset": 5, + "type": "", + "position": 0 + }, + { + "token": "world", + "start_offset": 10, + "end_offset": 15, + "type": "", + "position": 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE \ No newline at end of file diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java index 0f94b521e4b..b6b8b45fabf 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java @@ -29,21 +29,47 @@ import org.elasticsearch.index.analysis.TokenFilterFactory; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Set; /** * A {@link TokenFilterFactory} for {@link TypeTokenFilter}. 
This filter only * keep tokens that are contained in the set configured via - * {@value #KEEP_TYPES_KEY} setting. + * {@value #KEEP_TYPES_MODE_KEY} setting. *

* Configuration options: *

    - *
  • {@value #KEEP_TYPES_KEY} the array of words / tokens to keep.
  • + *
  • {@value #KEEP_TYPES_KEY} the array of words / tokens.
  • + *
  • {@value #KEEP_TYPES_MODE_KEY} whether to keep ("include") or discard + * ("exclude") the specified token types.
  • *
*/ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { private final Set keepTypes; - private static final String KEEP_TYPES_KEY = "types"; + private final KeepTypesMode includeMode; + static final String KEEP_TYPES_KEY = "types"; + static final String KEEP_TYPES_MODE_KEY = "mode"; + + enum KeepTypesMode { + INCLUDE, EXCLUDE; + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + private static KeepTypesMode fromString(String modeString) { + String lc = modeString.toLowerCase(Locale.ROOT); + if (lc.equals("include")) { + return INCLUDE; + } else if (lc.equals("exclude")) { + return EXCLUDE; + } else { + throw new IllegalArgumentException("`keep_types` tokenfilter mode can only be [" + KeepTypesMode.INCLUDE + "] or [" + + KeepTypesMode.EXCLUDE + "] but was [" + modeString + "]."); + } + } + }; KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); @@ -52,12 +78,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { if ((arrayKeepTypes == null)) { throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured"); } - + this.includeMode = KeepTypesMode.fromString(settings.get(KEEP_TYPES_MODE_KEY, "include")); this.keepTypes = new HashSet<>(arrayKeepTypes); } @Override public TokenStream create(TokenStream tokenStream) { - return new TypeTokenFilter(tokenStream, keepTypes, true); + return new TypeTokenFilter(tokenStream, keepTypes, includeMode == KeepTypesMode.INCLUDE); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java index a19882d6faa..d0c7723457f 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java +++ 
b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java @@ -34,19 +34,51 @@ import java.io.StringReader; import static org.hamcrest.Matchers.instanceOf; public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase { - public void testKeepTypes() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.keep_numbers.type", "keep_types") - .putList("index.analysis.filter.keep_numbers.types", new String[] {"", ""}) - .build(); + + private static final String BASE_SETTING = "index.analysis.filter.keep_numbers"; + + public void testKeepTypesInclude() throws IOException { + Settings.Builder settingsBuilder = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }); + // either use default mode or set "include" mode explicitly + if (random().nextBoolean()) { + settingsBuilder.put(BASE_SETTING + "." 
+ KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, + KeepTypesFilterFactory.KeepTypesMode.INCLUDE); + } + Settings settings = settingsBuilder.build(); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); String source = "Hello 123 world"; - String[] expected = new String[]{"123"}; + String[] expected = new String[] { "123" }; Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[]{2}); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 2 }); + } + + public void testKeepTypesExclude() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }) + .put(BASE_SETTING + "." 
+ KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, KeepTypesFilterFactory.KeepTypesMode.EXCLUDE).build(); + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); + assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); + String source = "Hello 123 world"; + String[] expected = new String[] { "Hello", "world" }; + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader(source)); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 1, 2 }); + } + + public void testKeepTypesException() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "", "" }) + .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, "bad_parameter").build(); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())); + assertEquals("`keep_types` tokenfilter mode can only be [include] or [exclude] but was [bad_parameter].", ex.getMessage()); } } From 36165265ce6010a33147a6e3c5296a64f989944d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Jul 2018 09:09:03 +0200 Subject: [PATCH 054/260] Fix put mappings java API documentation (#31955) The current docs of the put-mapping Java API is currently broken. In its current form, it creates an index and uses the whole mapping definition given as a JSON string as the type name. Since we didn't check the index created in the IndicesDocumentationIT so far this went unnoticed. 
This change adds test to catch this error to the documentation test, changes the documentation so it works correctly now and adds an input validation to PutMappingRequest#buildFromSimplifiedDef() which was used internally to reject calls where no mapping definition is given. Closes #31906 --- .../admin/indices/put-mapping.asciidoc | 11 ++++-- .../mapping/put/PutMappingRequest.java | 11 +++--- .../mapping/put/PutMappingRequestTests.java | 6 +++- .../documentation/IndicesDocumentationIT.java | 35 ++++++++++++++----- 4 files changed, 47 insertions(+), 16 deletions(-) diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc index 3e931dfd7b7..8bdcb491697 100644 --- a/docs/java-api/admin/indices/put-mapping.asciidoc +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -2,17 +2,22 @@ ==== Put Mapping -The PUT mapping API allows you to add a new type while creating an index: +You can add mappings for a new type at index creation time: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{client-tests}/IndicesDocumentationIT.java[index-with-mapping] -------------------------------------------------- <1> <> called `twitter` -<2> It also adds a `tweet` mapping type. +<2> Add a `tweet` type with a field called `message` that has the datatype `text`. +There are several variants of the above `addMapping` method, some taking an +`XContentBuilder` or a `Map` with the mapping definition as arguments. Make sure +to check the javadocs to pick the simplest one for your use case. -The PUT mapping API also allows to add a new type to an existing index: +The PUT mapping API also allows to specify the mapping of a type after index +creation. 
In this case you can provide the mapping as a String similar to the +Rest API syntax: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index dc201b38c3b..3429b35073c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -184,10 +184,13 @@ public class PutMappingRequest extends AcknowledgedRequest im } /** - * @param type the mapping type - * @param source consisting of field/properties pairs (e.g. "field1", - * "type=string,store=true"). If the number of arguments is not - * divisible by two an {@link IllegalArgumentException} is thrown + * @param type + * the mapping type + * @param source + * consisting of field/properties pairs (e.g. "field1", + * "type=string,store=true") + * @throws IllegalArgumentException + * if the number of the source arguments is not divisible by two * @return the mappings definition */ public static XContentBuilder buildFromSimplifiedDef(String type, Object... 
source) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index be44d790b40..86c2b67be9c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -76,8 +76,12 @@ public class PutMappingRequestTests extends ESTestCase { " concrete index: [[foo/bar]] and indices: [myindex];"); } + /** + * Test that {@link PutMappingRequest#buildFromSimplifiedDef(String, Object...)} + * rejects inputs where the {@code Object...} varargs of field name and properties are not + * paired correctly + */ public void testBuildFromSimplifiedDef() { - // test that method rejects input where input varargs fieldname/properites are not paired correctly IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PutMappingRequest.buildFromSimplifiedDef("type", "only_field")); assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java index 064702170d5..e5df229cd98 100644 --- a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java +++ b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java @@ -19,10 +19,19 @@ package org.elasticsearch.client.documentation; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.test.ESIntegTestCase; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.instanceOf; + /** * This class is used to generate the Java indices administration documentation. * You need to wrap your code between two tags like: @@ -48,16 +57,14 @@ public class IndicesDocumentationIT extends ESIntegTestCase { Client client = client(); // tag::index-with-mapping - client.admin().indices().prepareCreate("twitter") // <1> - .addMapping("\"tweet\": {\n" + // <2> - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") + client.admin().indices().prepareCreate("twitter") // <1> + .addMapping("tweet", "message", "type=text") // <2> .get(); // end::index-with-mapping + GetMappingsResponse getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + ImmutableOpenMap indexMapping = getMappingsResponse.getMappings().get("twitter"); + assertThat(indexMapping.get("tweet"), instanceOf(MappingMetaData.class)); // we need to delete in order to create a fresh new index with another type client.admin().indices().prepareDelete("twitter").get(); @@ -89,6 +96,11 @@ public class IndicesDocumentationIT extends ESIntegTestCase { "}", XContentType.JSON) .get(); // end::putMapping-request-source + getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + indexMapping = getMappingsResponse.getMappings().get("twitter"); + assertEquals(singletonMap("properties", singletonMap("name", singletonMap("type", "text"))), + indexMapping.get("user").getSourceAsMap()); // tag::putMapping-request-source-append client.admin().indices().preparePutMapping("twitter") // <1> @@ -102,6 +114,13 @@ public class IndicesDocumentationIT extends ESIntegTestCase { "}", XContentType.JSON) .get(); 
// end::putMapping-request-source-append + getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + indexMapping = getMappingsResponse.getMappings().get("twitter"); + Map> expected = new HashMap<>(); + expected.put("name", singletonMap("type", "text")); + expected.put("user_name", singletonMap("type", "text")); + assertEquals(expected, indexMapping.get("user").getSourceAsMap().get("properties")); } } From 9e529d9d584ae6f6978ac09056b73d9ccdde4a3c Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 17 Jul 2018 10:54:10 +0300 Subject: [PATCH 055/260] Enable testing in FIPS140 JVM (#31666) Ensure our tests can run in a FIPS JVM JKS keystores cannot be used in a FIPS JVM as attempting to use one in order to init a KeyManagerFactory or a TrustManagerFactory is not allowed. (JKS keystore algorithms for private key encryption are not FIPS 140 approved) This commit replaces JKS keystores in our tests with the corresponding PEM encoded key and certificates both for key and trust configurations. Whenever it's not possible to refactor the test, i.e. when we are testing that we can load a JKS keystore, etc. we attempt to mute the test when we are running in FIPS 140 JVM. Testing for the JVM is naive and is based on the name of the security provider as we would control the testing infrastructure and so this would be reliable enough. Other cases of tests being muted are the ones that involve custom TrustStoreManagers or KeyStoreManagers, null TLS Ciphers and the SAMLAuthenticator class as we cannot sign XML documents in the way we were doing. SAMLAuthenticator tests in a FIPS JVM can be reenabled with precomputed and signed SAML messages at a later stage. 
IT will be covered in a subsequent PR --- client/rest/build.gradle | 4 + .../client/RestClientBuilderIntegTests.java | 24 ++- client/rest/src/test/resources/test.crt | 24 +++ client/rest/src/test/resources/test.der | Bin 0 -> 1218 bytes .../plugins/InstallPluginCommandTests.java | 6 + .../ingest/common/DateProcessorTests.java | 2 + .../AzureDiscoveryClusterFormationTests.java | 10 ++ .../common/settings/KeyStoreWrapperTests.java | 2 + .../org/elasticsearch/test/ESTestCase.java | 6 + .../xpack/core/ssl/CertParsingUtils.java | 4 +- .../license/LicenseServiceClusterTests.java | 1 + .../license/SelfGeneratedLicenseTests.java | 1 + .../xpack/core/ssl/CertGenUtilsTests.java | 6 + .../xpack/core/ssl/PemUtilsTests.java | 1 + .../ssl/SSLConfigurationReloaderTests.java | 94 +++++++---- .../xpack/core/ssl/SSLServiceTests.java | 92 ++++++----- .../xpack/core/ssl/StoreKeyConfigTests.java | 2 + .../transport/ssl/certs/simple/samba4.crt | 22 +++ .../ssl/certs/simple/testnode-ip-only.pem | 30 ++++ ...tname.cert => testnode-no-subjaltname.crt} | 0 .../certs/simple/testnode-no-subjaltname.pem | 30 ++++ .../ssl/certs/simple/testnode_updated.crt | 49 +++--- .../ssl/certs/simple/testnode_updated.jks | Bin 2333 -> 4003 bytes .../ssl/certs/simple/testnode_updated.p12 | Bin 2628 -> 4300 bytes .../ssl/certs/simple/testnode_updated.pem | 77 ++++++--- .../test/SecuritySettingsSource.java | 27 +++- .../test/SettingsFilterTests.java | 13 +- .../security/PkiRealmBootstrapCheckTests.java | 20 +-- .../esnative/ESNativeMigrateToolTests.java | 16 +- .../tool/CommandLineHttpClientTests.java | 23 ++- .../authc/ldap/GroupsResolverTestCase.java | 4 +- .../security/authc/ldap/LdapTestUtils.java | 2 +- .../LdapUserSearchSessionFactoryTests.java | 9 +- .../authc/pki/PkiAuthenticationTests.java | 75 +++++---- .../authc/saml/SamlAuthenticatorTests.java | 4 +- .../authc/saml/SamlMetadataCommandTests.java | 14 +- .../security/authc/saml/SamlRealmTests.java | 40 +++-- 
...ServerTransportFilterIntegrationTests.java | 46 +++--- .../netty4/IPHostnameVerificationTests.java | 40 ++--- ...ecurityNetty4HttpServerTransportTests.java | 42 ++--- .../SecurityNetty4ServerTransportTests.java | 37 +++-- .../netty4/SslHostnameVerificationTests.java | 55 ++++--- .../transport/nio/SSLDriverTests.java | 31 ++-- .../nio/SimpleSecurityNioTransportTests.java | 16 +- .../transport/ssl/EllipticCurveSSLTests.java | 4 +- .../transport/ssl/SslIntegrationTests.java | 11 +- .../transport/ssl/SslMultiPortTests.java | 153 +++++++++--------- .../transport/ssl/SslNullCipherTests.java | 6 + .../xpack/ssl/SSLClientAuthTests.java | 65 ++++---- .../xpack/ssl/SSLReloadIntegTests.java | 103 ++++++------ .../xpack/ssl/SSLTrustRestrictionsTests.java | 1 + .../xpack/security/authc/ldap/support/ad.crt | 23 +++ .../security/authc/ldap/support/smb_ca.crt | 22 +++ .../security/authc/ldap/support/smb_cert.crt | 22 +++ .../webhook/WebhookHttpsIntegrationTests.java | 12 +- .../watcher/common/http/HttpClientTests.java | 100 ++++++------ .../xpack/security/keystore/testnode.crt | 21 +++ .../xpack/security/keystore/testnode.pem | 27 ++++ .../keystore/truststore-testnode-only.crt | 21 +++ .../certs/simple/testnode-no-subjaltname.crt | 20 +++ .../certs/simple/testnode-no-subjaltname.pem | 30 ++++ x-pack/qa/openldap-tests/build.gradle | 1 + ...OpenLdapUserSearchSessionFactoryTests.java | 14 +- .../authc/ldap/SearchGroupsResolverTests.java | 2 +- .../ssl/CertificateGenerateToolTests.java | 6 + .../xpack/core/ssl/CertificateToolTests.java | 6 + .../ADLdapUserSearchSessionFactoryTests.java | 9 +- .../ldap/AbstractActiveDirectoryTestCase.java | 38 +++-- .../ldap/AbstractAdLdapRealmTestCase.java | 76 ++++++--- .../ActiveDirectoryGroupsResolverTests.java | 2 +- .../ActiveDirectorySessionFactoryTests.java | 24 ++- .../authc/ldap/MultipleAdRealmIT.java | 5 +- 72 files changed, 1215 insertions(+), 610 deletions(-) create mode 100644 client/rest/src/test/resources/test.crt create mode 100644 
client/rest/src/test/resources/test.der create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem rename x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/{testnode-no-subjaltname.cert => testnode-no-subjaltname.crt} (100%) create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem diff --git a/client/rest/build.gradle b/client/rest/build.gradle index b1ed05a8342..fc2ab0bc4c0 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -59,6 +59,10 @@ forbiddenApisMain { PrecommitTasks.getResource('/forbidden/http-signatures.txt')] } +forbiddenPatterns { + exclude '**/*.der' +} + forbiddenApisTest { //we are 
using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage bundledSignatures -= 'jdk-non-portable' diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 0d1c3ffd6b8..49eefc527ba 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -36,7 +36,13 @@ import java.io.IOException; import java.io.InputStream; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.KeyFactory; import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.CertificateFactory; +import java.security.spec.PKCS8EncodedKeySpec; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; @@ -101,12 +107,20 @@ public class RestClientBuilderIntegTests extends RestClientTestCase { private static SSLContext getSslContext() throws Exception { SSLContext sslContext = SSLContext.getInstance("TLS"); - try (InputStream in = RestClientBuilderIntegTests.class.getResourceAsStream("/testks.jks")) { - KeyStore keyStore = KeyStore.getInstance("JKS"); - keyStore.load(in, "password".toCharArray()); - KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509"); + try (InputStream certFile = RestClientBuilderIntegTests.class.getResourceAsStream("/test.crt")) { + // Build a keystore of default type programmatically since we can't use JKS keystores to + // init a KeyManagerFactory in FIPS 140 JVMs. 
+ KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, "password".toCharArray()); + CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); + PKCS8EncodedKeySpec privateKeySpec = new PKCS8EncodedKeySpec(Files.readAllBytes(Paths.get(RestClientBuilderIntegTests.class + .getResource("/test.der").toURI()))); + KeyFactory keyFactory = KeyFactory.getInstance("RSA"); + keyStore.setKeyEntry("mykey", keyFactory.generatePrivate(privateKeySpec), "password".toCharArray(), + new Certificate[]{certFactory.generateCertificate(certFile)}); + KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmf.init(keyStore, "password".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); + TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(keyStore); sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null); } diff --git a/client/rest/src/test/resources/test.crt b/client/rest/src/test/resources/test.crt new file mode 100644 index 00000000000..ab1b8c2265d --- /dev/null +++ b/client/rest/src/test/resources/test.crt @@ -0,0 +1,24 @@ +-----BEGIN CERTIFICATE----- +MIIEATCCAumgAwIBAgIEObhDZDANBgkqhkiG9w0BAQsFADBnMQswCQYDVQQGEwJV +UzELMAkGA1UECBMCQ0ExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxEDAOBgNVBAoT +B2VsYXN0aWMxDTALBgNVBAsTBHRlc3QxEjAQBgNVBAMTCXRlc3Qgbm9kZTAeFw0x +NzA3MTcxNjEyNTZaFw0yNzA3MTUxNjEyNTZaMGcxCzAJBgNVBAYTAlVTMQswCQYD +VQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHZWxhc3Rp +YzENMAsGA1UECxMEdGVzdDESMBAGA1UEAxMJdGVzdCBub2RlMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnXtuGIgAq6vWzUD34HXkYF+0u103hb8d1h35 +kjeuNApkUhS6x/VbuNp7TpWmprfDgG5w9TourHvyiqcQMDEWrBunS6rmKo1jK1Wm +le3qA3F2l9VIZSNeeYQgezmzuElEPPmBjN8XBByIWKYjZcGd5u7DiquPUh9QLIev +itgB2jfi9D8ewyvaSbVAQuQwyIaDN9L74wKyMC8EuzzAWNSDjgIhhwcR5qg17msa +ItyM44/3hik+ObIGpMlLSxQu2V1U9bOaq48JjQBLHVg1vzC9VzGuNdEb8haFnhJN 
+UrdESdHymbtBSUvy30iB+kHq5R8wQ4pC+WxChQnbA2GskuFrMQIDAQABo4G0MIGx +MIGPBgNVHREEgYcwgYSHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAGCCWxvY2FsaG9z +dIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghdsb2NhbGhvc3Q0 +LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5sb2NhbGRvbWFp +bjYwHQYDVR0OBBYEFFwNcqIKfGBCBGo9faQJ3TsHmp0SMA0GCSqGSIb3DQEBCwUA +A4IBAQBvUJTRjSOf/+vtyS3OokwRilg1ZGF3psg0DWhjH2ehIRfNibU1Y8FVQo3I +VU8LjcIUK1cN85z+AsYqLXo/C4qmJPydQ1tGpQL7uIrPD4h+Xh3tY6A2DKRJRQFO +w2LjswPidGufMztpPbXxLREqvkvn80VkDnc44UPxYfHvZFqYwYyxZccA5mm+BhYu +IerjfvgX+8zMWIQZOd+jRq8EaVTmVK2Azwwhc5ImWfc0DA3pmGPdECzE4N0VVoIJ +N8PCVltXXP3F7K3LoT6CLSiJ3c/IDVNoVS4pRV6R6Y4oIKD9T/T1kAgAvOrUGRWY +ejWQ41GdUmkmxrqCaMbVCO4s72BC +-----END CERTIFICATE----- diff --git a/client/rest/src/test/resources/test.der b/client/rest/src/test/resources/test.der new file mode 100644 index 0000000000000000000000000000000000000000..454bfd286bd97937020cb8d1535c7cb3eec43557 GIT binary patch literal 1218 zcmV;z1U>sOf&{(-0RS)!1_>&LNQUrs4#*Aqyhl|0)hbn0G)eo7>EF? 
ztJcjx_uzHpU|+PmT{nfl9o8NBk~gk23S?3gy2tffxY~P8m8Pb*!+>sZ^*S!Bd-95> z5HK+otQ)6Gs^%(XizNwq*i zg`N^kQny4&(ejzQK}k#U-$;S_LF(lnFhhz$`D{Xk3EKlajxI^3zY*DMf42|U+NcycGN5Ii(&fd7Vo1LaD{Jj zk^&xUtM1`nB%&;_e1ejavA5F)$JNV-kx=7#3=1 zNR_IxrXGQvev4;>7K$A&AgJXrpFZ9tBu-XU3Uf%X^y(KqMsL0Z-C#fGDTJnM2BQ?+ zKE>d@a-CM0qU0BWzEYKKBGDEJ^dka+fdITy>*NbhtzpqD5?=qK)<@mW-;493$$ORU z8{^b}t5lA~o?5U70h1-GV?nHhZXY@>$0xT2_4l9h5S-b#TRao!cqSaPrDKxeqJOjo zml{5yQNEYHnnJMOUHUBmo4Q=cM1qpnio#8Uxx~|?($N?p><9S#EFe*7KU@iKo|^)J zfdGPA6Co{I{$ZaM#M7<2E=U`i@W9cZVz3Rl(n@%p zC!K#o3d*d6FqrbI!Bq4gl1aW28R!`EhPQJRp7x7Sbl81yUf$@cT zR|0{70Hk{CRiE_&-^6Tw?fk}J`Eyu list = new ArrayList<>(iterations); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java index 880cb03a64f..b82275a8833 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java @@ -53,6 +53,7 @@ public class PemUtilsTests extends ESTestCase { } public void testReadEncryptedPKCS8Key() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PBE KeySpec is not available", inFipsJvm()); Key key = getKeyFromKeystore("RSA"); assertThat(key, notNullValue()); assertThat(key, instanceOf(PrivateKey.class)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 72cd13471df..d8e0b693f70 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -42,7 +42,10 @@ import java.security.NoSuchAlgorithmException; import java.security.PrivilegedActionException; import 
java.security.PrivilegedExceptionAction; import java.security.UnrecoverableKeyException; +import java.security.cert.Certificate; import java.security.cert.CertificateException; +import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.function.Consumer; @@ -76,6 +79,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { * Tests reloading a keystore that is used in the KeyManager of SSLContext */ public void testReloadingKeyStore() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); final Path tempDir = createTempDir(); final Path keystorePath = tempDir.resolve("testnode.jks"); final Path updatedKeystorePath = tempDir.resolve("testnode_updated.jks"); @@ -133,12 +137,10 @@ public class SSLConfigurationReloaderTests extends ESTestCase { Path updatedKeyPath = tempDir.resolve("testnode_updated.pem"); Path certPath = tempDir.resolve("testnode.crt"); Path updatedCertPath = tempDir.resolve("testnode_updated.crt"); - final Path clientTruststorePath = tempDir.resolve("testnode.jks"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"), keyPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), updatedKeyPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), updatedCertPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), certPath); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), clientTruststorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); final Settings settings = Settings.builder() @@ -150,7 +152,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { final Environment env = randomBoolean() ? 
null : TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); // Load HTTPClient once. Client uses a keystore containing testnode key/cert as a truststore - try (CloseableHttpClient client = getSSLClient(clientTruststorePath, "testnode")) { + try (CloseableHttpClient client = getSSLClient(Collections.singletonList(certPath))) { final Consumer keyMaterialPreChecks = (context) -> { try (MockWebServer server = new MockWebServer(context, false)) { server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); @@ -190,6 +192,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { * reloadable SSLContext used in the HTTPClient) and as a KeyStore for the MockWebServer */ public void testReloadingTrustStore() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); Path trustStorePath = tempDir.resolve("testnode.jks"); Path updatedTruststorePath = tempDir.resolve("testnode_updated.jks"); @@ -240,19 +243,21 @@ public class SSLConfigurationReloaderTests extends ESTestCase { */ public void testReloadingPEMTrustConfig() throws Exception { Path tempDir = createTempDir(); - Path clientCertPath = tempDir.resolve("testnode.crt"); - Path keyStorePath = tempDir.resolve("testnode.jks"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), keyStorePath); + Path serverCertPath = tempDir.resolve("testnode.crt"); + Path serverKeyPath = tempDir.resolve("testnode.pem"); + Path updatedCert = tempDir.resolve("updated.crt"); //Our keystore contains two Certificates it can present. One build from the RSA keypair and one build from the EC keypair. 
EC is // used since it keyManager presents the first one in alias alphabetical order (and testnode_ec comes before testnode_rsa) - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt"), clientCertPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), serverCertPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"), serverKeyPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), updatedCert); Settings settings = Settings.builder() - .putList("xpack.ssl.certificate_authorities", clientCertPath.toString()) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.certificate_authorities", serverCertPath) + .put("path.home", createTempDir()) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); // Create the MockWebServer once for both pre and post checks - try (MockWebServer server = getSslServer(keyStorePath, "testnode")) { + try (MockWebServer server = getSslServer(serverKeyPath, serverCertPath, "testnode")) { final Consumer trustMaterialPreChecks = (context) -> { try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close()); @@ -263,10 +268,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { final Runnable modifier = () -> { try { - Path updatedCert = tempDir.resolve("updated.crt"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), - updatedCert, StandardCopyOption.REPLACE_EXISTING); - atomicMoveIfPossible(updatedCert, clientCertPath); + atomicMoveIfPossible(updatedCert, serverCertPath); } catch (Exception e) { throw new RuntimeException("failed to modify file", e); } @@ -277,7 +279,7 @@ public 
class SSLConfigurationReloaderTests extends ESTestCase { try (CloseableHttpClient client = HttpClients.custom().setSSLContext(updatedContext).build()) { SSLHandshakeException sslException = expectThrows(SSLHandshakeException.class, () -> privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close())); - assertThat(sslException.getCause().getMessage(), containsString("PKIX path building failed")); + assertThat(sslException.getCause().getMessage(), containsString("PKIX path validation failed")); } catch (Exception e) { throw new RuntimeException("Error closing CloseableHttpClient", e); } @@ -291,16 +293,17 @@ public class SSLConfigurationReloaderTests extends ESTestCase { * that is being monitored */ public void testReloadingKeyStoreException() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); Path keystorePath = tempDir.resolve("testnode.jks"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), keystorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", keystorePath) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.keystore.path", keystorePath) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); Environment env = randomBoolean() ? 
null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -336,12 +339,12 @@ public class SSLConfigurationReloaderTests extends ESTestCase { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.key", keyPath) - .put("xpack.ssl.certificate", certPath) - .putList("xpack.ssl.certificate_authorities", certPath.toString(), clientCertPath.toString()) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .putList("xpack.ssl.certificate_authorities", certPath.toString(), clientCertPath.toString()) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -373,10 +376,10 @@ public class SSLConfigurationReloaderTests extends ESTestCase { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", trustStorePath) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.truststore.path", trustStorePath) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = randomBoolean() ? 
null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -482,6 +485,20 @@ public class SSLConfigurationReloaderTests extends ESTestCase { return server; } + private static MockWebServer getSslServer(Path keyPath, Path certPath, String password) throws KeyStoreException, CertificateException, + NoSuchAlgorithmException, IOException, KeyManagementException, UnrecoverableKeyException { + KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, password.toCharArray()); + keyStore.setKeyEntry("testnode_ec", PemUtils.readPrivateKey(keyPath, password::toCharArray), password.toCharArray(), + CertParsingUtils.readCertificates(Collections.singletonList(certPath))); + final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, password.toCharArray()) + .build(); + MockWebServer server = new MockWebServer(sslContext, false); + server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); + server.start(); + return server; + } + private static CloseableHttpClient getSSLClient(Path trustStorePath, String trustStorePass) throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException, IOException, CertificateException { @@ -493,6 +510,23 @@ public class SSLConfigurationReloaderTests extends ESTestCase { return HttpClients.custom().setSSLContext(sslContext).build(); } + /** + * Creates a {@link CloseableHttpClient} that only trusts the given certificate(s) + * + * @param trustedCertificatePaths The certificates this client trusts + **/ + private static CloseableHttpClient getSSLClient(List trustedCertificatePaths) throws KeyStoreException, + NoSuchAlgorithmException, + KeyManagementException, IOException, CertificateException { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + trustStore.load(null, null); + for (Certificate cert : 
CertParsingUtils.readCertificates(trustedCertificatePaths)) { + trustStore.setCertificateEntry(cert.toString(), cert); + } + final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build(); + return HttpClients.custom().setSSLContext(sslContext).build(); + } + private static void privilegedConnect(CheckedRunnable runnable) throws Exception { try { AccessController.doPrivileged((PrivilegedExceptionAction) () -> { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index df764bb3f47..048ad2e8e36 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -72,6 +72,8 @@ public class SSLServiceTests extends ESTestCase { private Path testnodeStore; private String testnodeStoreType; private Path testclientStore; + private Path testnodeCert; + private Path testnodeKey; private Environment env; @Before @@ -80,17 +82,20 @@ public class SSLServiceTests extends ESTestCase { if (randomBoolean()) { testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); // The default is to use JKS. Randomly test with explicit and with the default value. - testnodeStoreType = randomBoolean() ? "jks" : null; + testnodeStoreType = "jks"; } else { testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12"); testnodeStoreType = randomBoolean() ? 
"PKCS12" : null; } logger.info("Using [{}] key/truststore [{}]", testnodeStoreType, testnodeStore); + testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); testclientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"); env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); } public void testThatCustomTruststoreCanBeSpecified() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path testClientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); @@ -110,7 +115,7 @@ public class SSLServiceTests extends ESTestCase { .setSecureSettings(secureCustomSettings) .build(); - SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings, globalConfiguration(sslService)); + SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings, globalConfiguration(sslService)); SSLEngine sslEngineWithTruststore = sslService.createSSLEngine(configuration, null, -1); assertThat(sslEngineWithTruststore, is(not(nullValue()))); @@ -126,10 +131,10 @@ public class SSLServiceTests extends ESTestCase { public void testThatSslContextCachingWorks() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) 
.setSecureSettings(secureSettings) .build(); SSLService sslService = new SSLService(settings, env); @@ -145,6 +150,7 @@ public class SSLServiceTests extends ESTestCase { } public void testThatKeyStoreAndKeyCanHaveDifferentPasswords() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path differentPasswordsStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); @@ -160,6 +166,7 @@ public class SSLServiceTests extends ESTestCase { } public void testIncorrectKeyPasswordThrowsException() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path differentPasswordsStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks"); try { @@ -180,12 +187,12 @@ public class SSLServiceTests extends ESTestCase { public void testThatSSLv3IsNotEnabled() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -214,6 +221,7 @@ public class SSLServiceTests extends ESTestCase { public void testCreateWithKeystoreIsValidForServer() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); MockSecureSettings secureSettings = new 
MockSecureSettings(); secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); Settings settings = Settings.builder() @@ -227,6 +235,7 @@ public class SSLServiceTests extends ESTestCase { } public void testValidForServerWithFallback() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() @@ -251,6 +260,7 @@ public class SSLServiceTests extends ESTestCase { } public void testGetVerificationMode() throws Exception { + assumeFalse("Can't run in a FIPS JVM, TrustAllConfig is not a SunJSSE TrustManagers", inFipsJvm()); SSLService sslService = new SSLService(Settings.EMPTY, env); assertThat(globalConfiguration(sslService).verificationMode(), is(XPackSettings.VERIFICATION_MODE_DEFAULT)); @@ -273,7 +283,7 @@ public class SSLServiceTests extends ESTestCase { Settings settings = Settings.builder() .put("xpack.ssl.client_authentication", "none") .put("xpack.security.transport.ssl.client_authentication", "optional") - .put("transport.profiles.foo.port", "9400-9410") + .put("transport.profiles.foo.port", "9400-9410") .build(); sslService = new SSLService(settings, env); assertFalse(sslService.isSSLClientAuthEnabled(globalConfiguration(sslService))); @@ -325,13 +335,12 @@ public class SSLServiceTests extends ESTestCase { ciphers.add("foo"); ciphers.add("bar"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .putList("xpack.ssl.ciphers", ciphers.toArray(new String[ciphers.size()])) - .build(); + 
.put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -342,14 +351,14 @@ public class SSLServiceTests extends ESTestCase { public void testInvalidCiphersOnlyThrowsException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .putList("xpack.ssl.cipher_suites", new String[] { "foo", "bar" }) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .putList("xpack.ssl.cipher_suites", new String[]{"foo", "bar"}) + .setSecureSettings(secureSettings) + .build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SSLService(settings, env)); assertThat(e.getMessage(), is("none of the ciphers [foo, bar] are supported by this JVM")); @@ -357,12 +366,12 @@ public class SSLServiceTests extends ESTestCase { public void testThatSSLEngineHasCipherSuitesOrderSet() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + 
.setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -372,12 +381,12 @@ public class SSLServiceTests extends ESTestCase { public void testThatSSLSocketFactoryHasProperCiphersAndProtocols() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration config = globalConfiguration(sslService); final SSLSocketFactory factory = sslService.sslSocketFactory(config); @@ -397,12 +406,12 @@ public class SSLServiceTests extends ESTestCase { public void testThatSSLEngineHasProperCiphersAndProtocols() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); 
@@ -462,8 +471,8 @@ public class SSLServiceTests extends ESTestCase { assertThat(trustManager.getAcceptedIssuers(), emptyArray()); } - public void testGetConfigurationByContextName() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); final SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); sslContext.init(null, null, null); final String[] cipherSuites = sslContext.getSupportedSSLParameters().getCipherSuites(); @@ -525,7 +534,8 @@ public class SSLServiceTests extends ESTestCase { assertThat(mon3Config, sameInstance(global)); } - public void testReadCertificateInformation () throws Exception { + public void testReadCertificateInformation() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); final Path jksPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); final Path p12Path = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12"); final Path pemPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java index 6dd9bb2b46e..511fd778113 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java @@ -22,10 +22,12 @@ import static org.hamcrest.Matchers.notNullValue; public class StoreKeyConfigTests extends ESTestCase { public void testCreateKeyManagerUsingJKS() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); tryReadPrivateKeyFromKeyStore("jks", ".jks"); } public void testCreateKeyManagerUsingPKCS12() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); 
tryReadPrivateKeyFromKeyStore("PKCS12", ".p12"); } diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt new file mode 100644 index 00000000000..59ecbd22e8b --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIUMVGoHuyNTjTFaoRmqFELz75jzDEwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0OTExWhcNMjEwMjE0MTc0OTExWjARMQ8wDQYD +VQQDEwZzYW1iYTQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtGBwa +n+7JN2vweSUsYh4zPmh8RPIE+nEVjK1lx/rADUBY7UVjfTYC+MVKKiezZe7gYCNT +7JNKazPpgVI9e3ZFKw/UxomLqRuuvn5bTh+1tMs3afY5+GGzi7oPmEbBO3ceg0Hi +rNSTDa1rfroZnRYK8uIeSZacQnAW90plITI7rBBt9jq+W9albFbDybfDgNv+yS/C +rzIsofm4rbFC3SMRYfrT6HvwDhjOmmYKZci5x7tsn0T+3tSiR44Bw5/DgiN5kX3m +/kl9qg1eoYWbCUy1dKmQlb4Nb4uNcxrIugLB3zjBkfhMZ0OHoveKh/lJASTWik9k +xQ9rEYbpsRbuXpsHAgMBAAGjgcwwgckwHQYDVR0OBBYEFJOLa7UXKtLPibgKeFh7 +Kq1+rS0/MG8GA1UdIwRoMGaAFGaNmN5mi9jaMW25MEWYgt+5OkDBoTikNjA0MTIw +MAYDVQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBD +QYIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wLAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD +ggEBAEHqT1WHkcF8DuOgyIBx7wKcUVQ5H1qYYlJ1xgMGrKFFZLUzouLcON7oadEu +HLIJ4Z3AKD3bqWpcls5XJ9MTECGR48tou67x9cXqTV7jR3Rh0H/VGwzwhR85vbpu +o8ielOPL8XAQOfnAFESJii5sfCU4ZwLg+3evmGZdKfhU6rqQtLimgG/Gm96vOJne +y0a/TZTWrfAarithkOHHXSSAhEI5SdW5SlZAytF4AmYqFvafwxe1+NyFwfCRy0Xl +H40WgVsq+z84psU+WyORb3THX5rgB4au9nuMXOqFKAtrJSI/uApncYraaqU28rqB +gYd8XrtjhKOLw+6viqAKu8l7/cs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem 
b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem new file mode 100644 index 00000000000..63dca9651ed --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,D097C279AD03E97C34B81B834539C0BB + +rQkO27X8phcP9ydaFArZy4SFDcoCGFsr2TcR0M6Vn8g0ZT9MIrnj2xHCmeMExlKT +MViBQzmupBs0IW7y/ovpEUBgRd4TnSigKNF2XZ3+C6F4rkziTheWOlaUq7PIqOnW +dTmf/WZDLlgms6hzrc7B447lO2FcNUDB6uXkxS1dAhh9T6DFcq9KuW7KJOWt9Dre +75z6rh5nHboUw2i01VqJK+86aL5V3vNK3bjYcbIHKHrgM/+V+hFUNJ6aEepVf+G0 +jzicgEBJwtzOg4MTSqR2z3niNbz9LLBJkH+uR+tN941+JPFmEwsLtkq3OBH7nraG +Srqz/5YcdA45jdZNme+KqorT8mNX/Hx2BsmQYnw+A0ZAqnSWeMcNZgFaAn8OcVxy +d5VS0KFlQD7DPnZi89cyTRScKhcj451NhwOPWIE7w1UpNPVGRj5FBMAtDVaE2Wkc +wuQ0nSwsC3EOvVDMe/fmv2VcoWceh1V9esA2H0n9NWQApGSqz17PPebwQadPX3Y0 +atrbbXT7fYTD3Ij38bmYZiDOluHiXxFchWxVUReq6nHJD3yo8ch2CKpx9XzjElLv +6leUZhlIlq026QxGNVf+OQLTlLXjF8jGbRC31Y4yiHj1I12P0+dic0W8YvUkC5Pp +w7NHMtYG6VHLGkPbCQxzTMS+JU24j/wMIokZWlmaRNDf2DZZAS0asQ/EOG/I1afc +SXyHDQUhi0/8N7CJWE/i1xLrazFKiIkxucxY+WDqGrk5sZnP+cH0cM7Zja253NTy +2B8LZJX360peCD15grkMt1oRtdMvZxF1rS/3BDPuANC59yg5i4eC+P39wkeMf8hu +o8I2Hp5021mT9AWE1Dxf8gSZIJZxIeEgioRvoY1LOEfghe/OXQHR0SXJq9k5GNiA +z4Tz3cfCT3WrHbReY585o2qtbpWu2OeSdxrv8p9kYi8GkevbR+nLq8NaH5WPRD4E +b9RLUz1NfiK8DOW7tk8+gwsNun3hmil7xubw1uhc6a0OGKb9ijMS0rslYW9zeowu +dkROuVwhUhXHOx0ZGWUGauJifERzICWR8Jx48/8DWD4xW3DkIRt5gh3CvzHcdSbv +4VEFSyndWeyNk2Yc6+sX0H35Rngc7gjedCAn8hUBnUq9srfhGKaZ6bahlUt0xsmK +Y3cxcd1or/2S2hONcN4NE2MfB/RRUVeHxdp34RPeW6L8qH/YZFxqt8dUm19QXr0c +CdgSEmVyKOpCPebGJwPwdJEmbxPS/98AjiqOpt83JLUhMeUGhjawXvLzl0YEBJV9 ++6waTV4Xl94aJszyvDeW/+cKIgeso9SeQSN6fLsXgdAVABCZ5yJ+liw6I84G0f2n +D6e51P0JQAL8v28bBACdoB0Qxr9UTV/X8smGTwWobv/KW1BPdvWETsc7TrtWLZ6F +qiZj7mI0h1oeyrC1h1+1oVuUTpy7JICCBloL4ir56lcSWmNZm1GRfar8UhXA7aNw 
+klAkS6rYHH4gDxhvwd1k/pN1HlCtbmwLyYC/f11+89RnPr0FFW48qMqmwBls63dp +4aAqneUiEow/evE4fBTLhFrgkvfZnjhd41IpzXfMWB5x9SHjrrS4/rjsHXcHUrAh +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.cert b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt similarity index 100% rename from x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.cert rename to x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem new file mode 100644 index 00000000000..b0f7a585d7f --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,2F36F79E75ACA7803AF1BC1B70C2360C + +d4/f7dnpPW9DfhGXuGDx7r56BjQ64iNcsQdrC1CTZB363iAvBBeHaoJFaWpQOUmT +WCBdM6YmRuMi6z4sMtm8Vre3i+Mu2Jti0LTDXqH8Y6FMpptxAYoFb9hkM0OvUGDJ +ASLGTa1XKyCzZWOe2IGLtWIS8zkGv/fSXHqDJV8ddq8uLHGZXBxAye5oJUD0654T +DasIllbiTlBEYlEJ10OyIAspS47n4LWVE5ggyGdiyIgh5s3lAMPO3+yenDU3/dzj +YF2zHKnfe0Nx2lUQVJRYknQvFCx0WTGH9gNl40FovoOMHyRjEuOHD9PFNvL/TCh6 +9h0jJPWu/SsIiYaIqR0pDgqWdHXHA5Ea3s2+w0YHbv7DqkGXWZdceLUdZtcXCBJk +P8QL9IWa7VE50SS6wV5uXX9tV5lHzMFsNGkwtGcR1UMU5cXYqckFXgoBqDN0fyWe +V5iEknSJ4Na+MHv75rvRat0kv6upu9i5NSpYTc5jLHdWZWsYMZ/ZMiMoLBP+KAPT +DQ3eyph/84BU3DePaQF3Rsp0ZvPxqQ361Zwc4zC5CKbHur1QX8WAY5XlBMfuBpkf +CKr5wgwF+ZpS7zsfUpMPPe9Y1E8TWnhx/DtCVLEslBpr2u/rMaxPp6ev9/Wry7N+ 
+UFBOwodipBhlfSvLqjc511L+bXRzqXiINuW0eSKUQv0J/G0Ur894kJJ6feDYlskj +JiZjOgOyyKhB+K9AXmkfRdvWUJeweL8pgDuYSyQdQ0zoUCZALEoYK2cBWzti/wep +QPFD5oz8076aXNHKMHLsRmSFuEZb6IN0PtUNVf958EbrtABNIuoufKlKtJsEnUyK +VHIEUxExEgyECiozKnxvhr7RQ9nTQXhNdgtec6jJblYnla/+OWAfHdxtHhBjp5AX +WvLyUhmgrmLNdKd1KSzcXynBHgh0hi0HJXYx31FilwbxsdhwN1LwN/Do4T4qGkUr +InrQC3ZHRuh0yAOPrwRFEWbW5m/PzVP/xYVgFtVWP7w38ftZbaBy5xPmtswn+PH+ +cIMt1Y9PaAlhLNpW/Vfn503T9M+05vu73vbU1xgu/B1kePOqE/WO0cOZl0KdaMmT +wAQBKuI7qTACH+/8g3Uir1YSChLthH+1Gs6h686EP6ZydwXq9GYXXkNmJNJJsnmU +RDjoT0F4XBKvcQdX3EeQYs3Af2yZWFDC59c1Ews2dqMK7vy2tYITbx2yn30DBDAl +xvjp2izzmAgQJEG/RqCYsUHCCEv7wz3tpsSOkFem9IHZpR2h8Rqy88GH9qYOkgwo ++fKSmIgC4RLQXsHuh7RRuyNc2FaWDgRgSxs5V4f9xOSU/ZbUftYWnwEyCwbu3RJp +CIXQFZhzU2t5l1Eh+x40rwpEJDXBEwmOIUO3x1oOqGZPPEQ674uMal5TRjvdOVGD +h665Fpo5Xu9EQwQZHYddeRl/7yw8F6LCxBLgHlngKRHHGDUHlTscLfYRqNh+x3jT +3S8dfaGzlnwdQEx32gyLAV0/nsFnzh1AknFMT8jesIYF7PLiAi67PNyNwRCc7TFp +jpKvzkDRVP72bivTmCyP5aKR0Q2oIrAw51MMinT6R2VaoR7COjoVbqYsRLwkxu+p +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt index 75aeecd0c3b..5500e5db4c5 100644 --- a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt @@ -1,23 +1,34 @@ -----BEGIN CERTIFICATE----- -MIID0zCCArugAwIBAgIJAPqdcmPxQKraMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +MIIF0zCCA7ugAwIBAgIJAJZQBp49qNzmMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp -Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTgwNDMwMTUzODM1WhcNMjIwNDI5MTUzODM1 +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTgwNzEyMTUyMjU0WhcNNDMwMzAzMTUyMjU0 
WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV -BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA0wNzfQ9K9WIs9Le6pfiEHdCI97zGZRqOREKb+zFoDllXEEWW -Y5mfXRlTYXbxBiCIV5DDW0yaXlleq62j7/O/6prNUBiYo5sK4Wfj+1UlXLmMikkv -bShm9XzBAXHK20coLJTKOH8QOnFyuVYUvHPCLsoEhQtCHU4zoGaaPmk8w1JU/bfR -+kWmU+x0Ak4rGuRWvMMqg/bu/W/1LmESO5Jsm2CnIyB/22vB08kFn1pO0IIrPQhr -dXqPxddzxc7DuAyyMyzsrLi5msugynEwm1CTBNL9cG45ujNhWzd1rqQe1HF94mEw -RinFe2Ui+vLFpNbh8EesLsy0p18J3QkGQ/0xjQIDAQABo4G/MIG8MAkGA1UdEwQC -MAAwHQYDVR0OBBYEFLUR8gs3uCNdLIwJlkp7SwngpjfhMIGPBgNVHREEgYcwgYSC -CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds -b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s -b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL -BQADggEBAB73kmak89jgW1q3wIUMfxisokCsk7XuYqPBpc8D06UZBaArCvW2cuLP -5PLI2bR7fSIhgZmlbawa3adOLZ9n9lOJrWUHTh43kKyiSX8EobE0T/MSGVMfTEFu -c92erLS2gSDk4wLahzm5htd1h1KwzJ5j5kdzlLGaQsnxDDjbu9BiX/otEyCl8n69 -AZKOXoYscl2NxBgr2V6C2frthJFfQ9Gxzg5q6V/P3aIUyV0xsC3Ve1wdfXqNeRfO -nrnFbKRDsxJAJ/JtO3GTtqBrFjods0sepKNxFg13r/QLJnYjYW6t7o91JZj2AFOs -1INZnCOAMV3vR/FOwwOT86HDgrwSy2o= +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIICIjANBgkqhkiG9w0BAQEFAAOC +Ag8AMIICCgKCAgEAqLiqAPYBBEBvSJCiQOzV/NPgr0kLQkZGaxQ29PPoKbsKXVp+ +0Uyv9DUitPw+G04h/eOC2wiu2u5sn2rD4Ew09L41qiaViQRY6dHazAkPVq8Chet/ +GWrc+umLJUKLU7MTyC8ozmKjgkyvIuvQ6ndEP31z3JG/j9DsBAX8NWIIJSm9Jaz5 +XUS4fIXwSce141k8zb39+oStyA1qIhv6n59+oNIuuXu1XIJzjQnZCnyAO8/9i7LI +uoL93zu7xNT+ns6Tt7zhweUQEI8FeRdj/k/Of8prbaulFH9oM1g/lnGKLV7E5vh/ +foP1+SRW+MWRjAUA8MExTCtvFhRAb3x6FYzCPX3VERKn8M3m6Rewz/LQ7XG2VzdM +/dw/JSZilAWBZItkY9H1InTeIz9Sw9Z53um7tO5nzq1QQxZijh6n9vzSLoFn6qA5 +SDQl2YycjBE35i/9JBUl0KmVMOfFzpoWLiKzTJMRzNJIeRxJl3MvscbRl8fY0Kco +XQ+w84QMTo+Tn+8Ztfme4uGtHHCTRzrLSo+Hyh8BTm9jJKCaUbMnNW1p2LoxJlq5 +ah+W8QRLaFbWt4xl9TQR0kVnoOyfSGx+3SmTBODa+6Wg038eim8Tw8K+EBvxynWU +sKF1ReL0aOZLi2zqPiQTUtR2y5ue/xjsFWx+tMMGTz3Ci6UoFs9yKqQ++qkCAwEA 
+AaOBvzCBvDAJBgNVHRMEAjAAMB0GA1UdDgQWBBQLFB6mVsZpiHNMkxOgriN53HZv +ODCBjwYDVR0RBIGHMIGEgglsb2NhbGhvc3SCFWxvY2FsaG9zdC5sb2NhbGRvbWFp +boIKbG9jYWxob3N0NIIXbG9jYWxob3N0NC5sb2NhbGRvbWFpbjSCCmxvY2FsaG9z +dDaCF2xvY2FsaG9zdDYubG9jYWxkb21haW42hwR/AAABhxAAAAAAAAAAAAAAAAAA +AAABMA0GCSqGSIb3DQEBCwUAA4ICAQAQtteI+//qUZ7pNQ5tBuwc24UlSH4dbHFr +79pA0YN/Y7Y/PfU60YeGpPf/QzZWLnRk/8mELWy2Ok7KzhlNjr/nAksiF97LUUnl +6dP15a4ATzheYAtv/47In8ShOrPZ7YG+VAmNWRB8Rj62RuE5khcoL8BpWzCHL8Gx +Kif7lN5SOV06ExGHrBr4Y20QtZsTgkWhAP1UC5EwXxKcchCAIrb60Rx1OShzQrxz +I0RF4mfJ06Jad6qUePkPoUm1lPcFfNvAnJ+hBA210J87NSMFTSsgXT197upnCdhl +9QmKHyk12oVoMtTtf2kc1yTZQ1fnm/V4/PZ7ypyhy+jMsIQSTwEKQr7JYEQoYpdt +yTMHc9L4gPkLTwAbxUx/M1lSuQ4yD3SclBt77CxCM8I9Z76qCVakalb4VhplRbsU +sYs3dgvslFOAJKoOIPeTqm+2L6psxiV9WxA6tvEAk6P/AwjOK6Y4hclnoQDgNI2Q +U+T+ZPi5WRmibs0WxfQv4Vv4MQG/LRT3pwsKWQ76TzgtHKNHgtAKd9Sn0Nylacsh +yhDbvI0jnvwoOlUPUVWerSJLxzazG5mRLi94ZxZMb+7TUUtDBEPtkCquCAA2b4Kh +ykHbmBqhC8Pn9qc9iP0uHuuQETUl/zd7E2ZD3RIj4uYYspovihE5D1Svi5m+3NuS +sCsfHRjmMw== -----END CERTIFICATE----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks index 5a5596dae48da6686f4fee825d3c76cdaf0c8b2a..0df3fdba60daf866548fb5f6eb9be1de8ae09659 100644 GIT binary patch delta 3666 zcmV-Y4z2N>5~ClG7=L7s8Wr6D011dNf(e8$4h9M<1_1;CDgqG!0R;dAf(dZsxuawg zT;MDy9y7HOX{8g`m{ODd*LxA?dybEj!?yR!Gd6aI>u_UXnJOOZ;Et+(XKrmUfY`~2 z^U|#G5l(HJA$8OABuzcxy>*jTp41PQ$q^EF!zR1%2z3} z2malaxy|9Fuv1F;;nr!jvFV_(W_ipY>sablBHx_Q6AKM+6z>{y`{hKpMtF8DC-gJO zYN$2{C1S{ThJR&dZ68*PGaFF)xsC_b?qi#~nrpyOM}USTW@gzTs7fJA{r@PRJDIT2 z9%6%+U0+8By{3Kp8Y+nZ^^I{~!q{Tk64Y5R=;`RY-JKmnU%-!-WK&NK%F^XpK<{}- zI|8K6AI!2mq2GJ5;J2j`>kbfbL*1Nn00LgYvf`aoP=7Ed+nOt3!sm!)MqEq1%V0y9 z=z*Jd;0Rew74tgx`An@9b~fF!-D6ijllDb;byrW>{l=g+q8g4tX9YD4QPB!L;~B7R z3v{0V*Z)|Juh)#`CO@Wo_T09%QaP_?XT&?iG{Lpx%G3aoHjW>$pQg^e_@e|-IBt(B 
zwfbrew|^B4q@5EX4*z@d!ART}cbPiYHQ;{2oa#8+5uzuI{oE`L%SIE38%!X0H~-*H>tA3)c^07K{gc1oBwTWWuX%rt z(S_}ar<{6bG~+VzYnsy+e0$GNYIQ&&_r9AkFs zvc$ost=mEggr-2lz#iIZp@_dB!2WqNV}B0C5^C~p-L9I4C)=|S&i;bG!sU|cK_ZQR zB#6lu$gl3aJ)O}=0B}0ZT3UT&Ds#2acKRO%f#VIeD>aE@M)8MbIyk<2+N4CBLBV-o zGvqZaAPtcdT_7^ncY^;|89BmD8)UAQCa|vv$EO`Ly?WOg98^)yH0AGh(EtvcuYb3y ztQHArF9H@{j)x&cCKeD*6sXV}w}J`#^Vi^Rq`I$v_}*zMuH4IpoT*B1Y$rN9?|2b3Mfeyx_*Ks#eK1(enM zOo(H~$mp=Y!FnEuw0Cglet)lx)4%?3kg^`ReTs(1dPfC0n$QkQi7K0$40T;uL_jtP zoL&c{t)-{N5h2bwY&g6wkZeUEjK7bOtO><8fTl%UTHTn|N)C8cq!Hg_k zYZE;s87kPvm7S!28UO|D$3Rsl&H6K%`GSsB-LQ&fC0<<+M(ESAz~OEzvld|en`v7a zmSAx}i!fgacnQb=e}4iiPdyD?E$-V3ax$`IJ_$ne_1Rddj(~k_pu}f`+flOYP#i=n zIWc3vi|Ku`IOE{$yOyCtiThph(-5|zrG>lTC_NlMkJi|m=!wK*=U3vFdse>M6l8)T z+x-_E4~^^BYiH2|zu%mxW|?XS)}%aKZcT8K-CJjpP)ERF4u5IzZXeq4Bwja`GN;K4 z_W-e*SE#jZak_UoOfB6JEG{R%MXoraZOY})tAq!_UwQ zTkYT7%QPB6(aen}+|#NTl%_31+&K8&EgY_@LtM|mA5<~6zvcAY84Tzti_80005~}) zFL?La-5fFC|5{iCfvVf3t6ib8l} zmQ=VVe8`cL=?ZxBLWb`#2fR(~5>=d}g0c-_0A{ZSTufH(%c26YAh_$@9;bp;r)@&0EPLNv9~kKk!< z`n{_RuEn$b_rwH9S?&`u07acEk@T)iF2)UTXqrZeVdY|w*1H!2gsum@30$*qN5?eG zn_$%^<*K2#cikKI;rdxp9F`9rm16Ft_F{#5Mt_~DdoS7`=1$0t(Fw>drhdAO#+^D8 zDARnVyMQ0572y~G@oAd#{)fBs_=XwJ{Ean>4Z`jaO-Sc%fp%11e?*_8L}s*^suSXl zX5tS<5(g1w7GdJaPBd!HFG7$N?vW1DP(d&J(SqUDtqo@a$m%_7O_trfkyR$aZ_~A9 zsY`bmh%d|On3u?6Y&q|Parc9$w06M?1zZU4Jk2dP#U3&n{4xLl00966SS~d%IRF3! 
z*D!(w(=dVqyPyLC0RjRE0G3b&o;|4C=95YVU>P?tGBGtWGBq?>7Y#HsFf%YSF*PzW zH8fh2Sp`dff&wBi4F(A+hDe6@4FLfG1potr0uKN%f&vNxf&u{msJN;C_5lPyZ%B}$ zKCxKE2@h7U0)^{;8EV}6>5C;oi&L|I6UZ+p&SIm2Os^vA(CT+YKYerD zk-v}7>;wh;HDU-QDZM4E`CUY~e1-5y$F<{GJk7oR`h=~>4Qe7A`k$YEpwce6d$nAG zbBzhv3VeV&&;5(C$hw04-#fd+)c&5%lefI#!Q~JTj|F)bWByOhf68lZtECiwXfs$p zmT`)IEndXt_@+&kB7=10ZwmYB%qp6vnMreY1q0kCR({_AC~b1 zOK4Wsw~S@=G!fE8XQ1q#NNj%HDU$@?+WV!T(|;a{Zxh49z7QMn%5{{mp>;*#^l0WJ zON(skJ|q)T)OO38p8pu^6>NUA!v;@1!i%LS7SD1jq(1tolP(7#6blp{rdGyjh;vMn z6QHgmdE9nyIFn-sqkjSc01&pVK{ZtE)Fkcd!au9$bw))W=bvY<=LcDV$L`C9f$W|wa}kCH6sO0DQuA!{_#=J&64-9_x@1H6{NydlP0u-4Y|><`}Y?FNzU44^*#< knZDfHk{hrq9~~IxGnDbwQWQ%u!*BHQXGMxm(5Wer1$FAwC{R>yf*C`5JzgPUBD~`nWv0t-)Y?Wi2!^(I#vOeMy;98d z727@B@6v*%VB&Wh5jRfuY`19Iy^TTA1KRgMWbNrD`t+{u1njo{a#&`fav-TeV^l1# zX3|hGS$|~G(yVB>LQ3OkEDMxk)Ju+yrq@dnQ9V~>a-q~t6JlR)lFLn2o+)mcTNPGg z(?ws(l&sM(d)1tM@Lx|AJ^?)>8jkn}A#UgA-WsO=4N+sm&hKhn?i%`ZT1q{LoYv5X zX8Cj>l+o>G;cG+1ooK0wA0c%_v@kzt9|tee`dSL773#+Xy>-&qo?g1z*|0%r@{1lp zUfSRy%AmLR{gNd{PRlq5e!ykO|oPQ19u-AK{)#Uxw?h9J>3y_qcW|*0HvViD_ zaJW3^H)4(~-q;!h@-EY~WgoJ9k;GUSm(^45$~DvF{snY?7{8kxMc&auYNB2+j7}WU z2a8O=qWII&fELe)3t<$uXHjRj1-&@=a~^;1N;hjR<}de)`Z9GE5ozfXwezJGiJ z!V@>8T1l4&lCe7+;*ZyE5JqZmcb^x1!+#y>#3J*a<+va` z(`&#{J&AuwN8E65W91Xa;!!h$lYhLtzO4LQZuHJ%*0cTu-%HTR)VXH396we1ys>_zWI;+jY1Hn0coz%aBPI!(WX8Li#ec|c@TGZ z5vb_^ZBBk&g1vG1U^Ni{9M;gCabawlKlWQLGx2`7xbNObWA-?~?9rkQk350VjUMfX zx{n51PVx))F~e=&DjApn000311z0XMFgXAK1J^Ku1Jf{q0=u9C0s#U72>|+?a%1s8 zs@jv#31AsCGcYkVGdMFfS{Ds6GB7kUIWaXeI5Ra`livwTe}VxbFbxI?Duzgg_YDC7 z0R;d9f&mWzFoFRJ0)hbn0Mi3=eGf|YVl4Exx~2Gp9ngsPyvAi3jzmJ6`!Q$^SyvE6 zmSdTpT^UnhcJT%vh*yxqTTGf>SzfEHqwn*->YB|^7?`7*3gKtt`&A`exr~ZQFKsAh z^?bnramw3Ae<&=J$~b=zI&pHjRusH*!Y;}Lg$qI*PBWlpnm%bf!%|fJx6%4Vrc>;6 z0!}L$#m>XH z46-vU?5w!Cn#-WdaWI=ulLXTJaBexeGhtgdb*`iyBGhqx;$bjGDaCtbBKq>hq}Ji^ zN31T)w5MMQ-3bOm{V|P`vwGr|QH@G8REQ|@3N_$HQ;HEd>lj#hje*pmi9`}-F zr1RL|TUxikg$#cftfD}yleO+*qrs)m1Jk7$1)wVm^|o^2&*bvR*|d9oA|Zj9rERPl 
z-KS11pJ(<{iLGS^P98UqtfEPO1fj7sPxBHPQy)x0Zgbt9thBa)Amrl$+J`y0hSzn6 zQn1XPW9CP5l(CvZ$?*(0f7`pzVqf|#6CkDXe!T&bj$Vc=a$SwY7%SOcg4z1*gpprE z(Xq}BYUy9k-J%r9T`{mN)q7kWeR_>~7tWr!#cZTQvl2ij@@+eDleVC17CIfXOCF@7 zaTX1Cuk;Hhb|YbKt?rL?C7AXAQ>@g3S)3z)F;6$;*9Do?=k4RF&WGd zaFOhAJt9X68VocpIAAqvCZz}Py_P!~frq@5l-YsI!A*(e4bO&> zzC*GP9QIY9>VaPcoS|_msv$M})wFTtXQBR2Nr=K7>e`d$!^xP2Bu=|6cF~=M7l_eg zuY#)9EfFH?#myV>6Z%kBD*s3|?V7!Ja56|iPO;U{iF!K;&{m5Fi!F9LDrggi^tA<= z#Zy+o2uULNO{yWS=`!&Yl%kiKe20H~YZ1Zm?oyQx)-hM(Bq&XMm--W#aR;s(tTt)b zrimFOPvrNgYy=cy6dzg3p<5Yx9)gB!ld!D7R#5$=)lNQtUb|0rLJCuD;Z$hPMM}ga zdwjQgL9(*1Ig70{Di!FC4gAa@f{T_dR^^?rrW;BHf{Uj?bs$lDa(C*b6Dxm{_je-j z6YP7mTl6AEf9(n`WImQ*sgoM;CVuW~cgU?qsPm(P-vJq?!&!OLStbey#niEU*~^4% zIk!M#Dx0d4H=#pc((HKgE{48p3f}^(n7N+KT>5n9TP*DI1J5Kl)? zJsG6oRA`8I@ne9Lw?s$>+_QfrED&R8L6!r$Rk<}$K^l3p6Pd=(JRn%_8WmUCv!Bbp0!7w|hPGOJrf2Vj5PuLh&q6?YjiSf)N0$3$=-err52f1^pSDDAK!wOJ^F%7UMlsxs9|+RwAe@_RDeN7` zh|^^=;yF)?4j%`xh(3%~k6|yut&oAqU8j8syJ8$tD6dM(OJt-V7jQZlHqHVCufBEB zAHk@-ZPNctNn$W<#{z%+{hPny{ET%Xv^S;Y`nFk&`ijWE@o}l z>r{nd-7iv`x4$N!d4JfR0ru)_u0m8aUvhNfkWrrRT=qSOVvZ*Rqa*yv4S6(LrNFsN zf@H+aFO{=w97nHM=gb{}a8ykz%kLSb!NpP0-iH~$#yLtg0C@uwcg*N`-*94ayG_9+(e-WCZt#d{nN0%k2_Z#tCT(6|E7 z=F&Byhjdzj>UTyl&AWOG4JYlxX1T`@%A7jtp^kZAN<4oT-?T9djtiyoef!bQxyb5y z@P11!U2Y0;GqD*=wG9JOrOSXhDe>atgv04tG})xCzCH+#RC*T`*7ypymOY)^q7n`t zhOTp<*3bip7y0Yf!-rpeVeuc7ngeQIpW2x$!d5BSrAzUqy7(u#~p2uB$KxfB^bcnEASq3-EV{$$8_SC$=V zMpUUo`rEM5+*6I?bW8BeG|C3!sF&is7 znD~DVJS*g37aIetC*t93C`1rj_WsV^lqzqNF%K)*m9<#Ld$x~=RU0>f7vx0<&@8mt z1VJpw3yVwFdgK{B)lcc_6+b(_ZdB{yFbHcFC-54ISHgD?o|tpdVxTT55_KvEF;ajq zf(hUT2`Yw2hW8Bt2LYgh3DE?C3C%Eq3CVvjf(gY23o3?4hW8Bt3)al{q;l|(MG*llr~lY}!J^hk4E@j%(?B~h=^!z4BFi^ap}|B1BdE`opj zy}W7Qnr2HuFii;aXTwC6vy73GKkQEhYEATr@X%^r$N>+^$8m*?H zN%a*TRc#^=4^dxBp*h9<^zCFfWE+s*7IoN3WkiU2`Tv~fmlx3Ou&aXZ8C{1?n6m0& zEGZ#g0f>7#r{eI?)g%-C+;x90jG~~@M8aCFa={~HIM%Or6zO{MD=F+idoO>KBKM8( z)8W37vDxqZ5hDPE-fJ1$inQmy>H!S>1c32z?T=zFa`Fm9kz(sZ$lAzWno?<{hZ^FEV6vcAy~$b`rd*=*;`);y 
zQr@S63=Qh!J86KzIoqkBg&=e1uGZ4a@@Digt!7)`bsj*1-n)73f2UwvQ(pg zx)tulxh}es>isucSg+ZLgY3QknFY6mWAXmCvqW)N`oYyE&L=~HwT6Goo=cCrOi;k% z+ECD84K`RTNhSa2Ab)Ybr2;p_9sRQ&xv{;jWE6TNCw4AepD6QPkCLKC9?ouO4;c#O z)sjC)M#{=8MX0s^h6a5TV^tyx-xp5Tg!ySAZfq7A2uhpUS7d~f+;%GT;XihA=s;jy ztJA`RY&3)3g0tOeo|=D;WPu3~{{e#J&~1ABYd;#nEjj+C;U{K}QGpy3X!Lcf9S=0Z zlh?zjE|bk&h$)n1luZ5!pO{gDlsGAyAw1HQyljz^OHbVw+$n0A8I>t&3F54TO{E1Z zBs=fhtPS2H` z;<22am^%kA56<|&l&Qs=g5wj#+d{DXzLk+@;kE!KPN8~KRtNt03mHML+$2QX;?2Lv zS*vy@CSac?RYf6VtCCc4Dg zt9@!OHkN5Xn2qR4RlqTEco84x-JNs8nK?lSbRR(BSRkHA?uOX4doz6B#6OQDx~&D3c9s~`GO6&S#ZD(X+?0g!`GJVP?V z{=FsoCgnHA7RYH&mX+@dSpP~zAX-xxZdJJVC4;y*B z`?=QO2E)$xq+_q}9tEv)Ft_866r%*SL*A5O-Uxr^MR-bxavh`@s}X2#qNA#m>Sg6} zk?`RFVuba&+FlP4If!xV9OykP<9-cGuu3U+dJ&sg>bLCI`?loZi%x2IpCDa>5|?2i z3Z0}+{xHdoPu~dWGNj7Q8$dNF(mF#4AmN1W7KL5>E>IyL1=@DTVctHiF*oucAdQog zT8V%1hJ*(r<6?ZlVdc%}=ifR1Q#LSjF6&M+^05Pn^<@crp{Y=4;cVhE>FV3paBXJ# z#|qn&!kT`Hw^xxGItq*w2VxNtj%{H6CUu@@mDWf*FG`TmiZ9SYSKu7YY(Nf zc*J5uCG8DA1Rk2H@0n)-_n3m+dcmcRV{7sh)m)kL>_3|eXax529MT{HJ<_XFtat*G zr9l1fy=FZU!GL}#Jl@ZF^YJkq%I_zqj+Iz2ZKqmacBc|jV|Io7#Ip*mO>LVVdE}$a zvPn=?8G>k$IlSTOvnq6HdR+0Gr3c-2}x-yV<$ zpv~ldu(RC;4~TwuD~(Mwn&Mb@6p~3W+>YZ8+|Ttu4is%x|HlLHNWtO8?mNn6g+DqD zxN+$3BlfI}BZMu3-R!n{Oi$HawlMr}cX?_foI7}}g&K}A^n9C?9j?TpW;uU{iY}Bl zE=3$Fkr=qK@5lJ$wMJp#y$2F{^wq6maq+O+CZNsmQv;^>LTF2jw}QCD`j+Z^O6W)!`^pKc%J_2 zd8`u!AEzaFoSyG&=`MnXOVZ+?S-X5 zdq(G!buUn+ix2+6II$SrC&HrIAcfaxc#gT#PNyWH4P;>M55)r>sJ1I02+)Y76R{@e z^hb0%Rt?G3*#0%+;88cVWcO`0QAB@f_KRO`qIbYs)vGHp*eU$eT9xF5SjQJ$R!tWT ze_?zb0`&~sk#CD09|j(dH_?~Ax^g&F~*q(?1heY8m6L zoj3pW?8QZWK=_)jALDC_8E1gl_Zm+v_=#X#8mFY5$Nq;N?3;!#&f&HyWriAZm9>A^ zu4X=MvQqHXji(DoH-wAE?;-Kq5*i^ldLTsVH$YB``qnzKsH4FuNuZ-Cc!o-Sm+dWO zwf1Tghpxy^dupjE`EO|56QzW#E6xmGEd%kNF9iTXOWdJUE>!G5lnIY!X910o`N8aC zbImUmW%cA?MqfYMAJxb)bK++d&sKlBuMF%0!8HFcGbD?V)!Q*Ho8VBO`d=9#)Su5P zh29%wDdXo*A9w-d^a+|)idko?9V;xz#A*s(myNvTr@*J}lB0a0a08RjQwT%Yv5nZVS2kZ1N0w#P2>8PI>KD^jvx(43ZHf#07W_O#UsF2;}?oe<9EVcDe)ntXY1 
z>$KWxRNRN`M7b!lhWeICsJ8k@OT2HYLU0(m#_flwHB8NEu3hw}@W)h8s#Qhy)`uOa z`!ts4&s&?%VPj!UJA45O4EV!%Py{)Jd0dpthdDBbZZ?1fvFgFqz?FYwvyET_5~@lK zqVaNgILK7FQnnVjxh6x`7nua!WmW!xtT{66?7UVO9gS+i z3*wdRN~YYqCfOW&0mFYa8sr|+?E5W3WxFhumUAZx8E{(RUaes6%uCHmqk_6e++{I^ zi6@SAhMd&mb48(sAhj;5nn4L4k{_FIv)?cvJqyHv5r!xm6}aoNO+}IGDAs-)5eKwI zz)Y9Bdl6%?ltaLp!))(_M-VVOxINC2UPISoQgXk3=@t@`ktu)UvCT)XW_0Egt7A(d zxVDRp0_)5nl!W7xc422~?Yf+gtQ9Kek5fjba2^lLV@;qqg0mCd0WG{NO13!|n~%q+ zpYKoPV@&srkY(!OUEKx17G>*wvj1$W6~~@QB%3>yOz|R|$zZu>l`Sfj# zSIX(=Qq}lby~%#Mv9N6WiX!f#}zMd@sRTf)MsCe$80o>*3FZYP|6~)8#CWORO38Uw? zr^QofK{_H!-YKAU?!CGze~6W@TS3uAitUrW(UyNWXK>T-k+`a5NXmm81Z2G;){uyUT&M(n3th(Qka=XDxk&pYxf%Z%3I+`eJ`w(5 zp!9z|*lQJwj`zqH&^}7VSIXi=if(fTZXSt*veIIzLE|;p7~OAJ2qvmL<&?JaPMWn$c1zq?QNTY*qE>$~ z4qf{FUndBaZ4~SrPLEEMhAB&qbLG%kM??jxuhX74+NiRmXx& zqi7b}b#cC4VvIU8Z~?FL;`r_f2j5GmpMaHlVM`q~n&Cj%S(q0>cyzKWwn&XM(g6;V zF54IyDa`q1dSV{!u^Z`D&G>aW5rBW9V$erF!nqwBP({Kt22^$gumsu9(SIu#8`i8` zxT)u5UHvNi=-ZH`_)nO|BU@rgUfyM_A#j+-020pe4$$|-?zjEm`(X~`RoHj|Oe@D+ z65^ZJoxmESb6jG3?^uUsgbhi4qAg9wrv_k#7+uI@`&73inHHx<*y%ZGV+1Bcv_w(oGpK*Kt~6(w!47Nv%$@`iBOYZrk}Zvh$Xkak?XC87v@OB zE9PtE`(%%4QkC8KfnjAK0WfLQ4=(>G6wV1>;Tdm;40eBx!zwvjlM^}Mb{E2_*$vp7 zd8l{m!O5XLr7?O`X!{-oV75NWP!f8bj&XY1WMRfa+U{Kz trustedCertificates) { + addSSLSettingsForPEMFiles(builder, "", keyPath, password, certificatePath, trustedCertificates, true, true, true); + } + private static void addSSLSettingsForPEMFiles(Settings.Builder builder, String prefix, String keyPath, String password, String certificatePath, List trustedCertificates, boolean sslEnabled, boolean hostnameVerificationEnabled, boolean transportClient) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java index 6447be7e69c..1886dd4249b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java @@ -60,13 +60,16 @@ public class SettingsFilterTests extends ESTestCase { // pki filtering configureUnfilteredSetting("xpack.security.authc.realms.pki1.type", "pki"); configureUnfilteredSetting("xpack.security.authc.realms.pki1.order", "0"); - configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.path", + if (inFipsJvm() == false) { + configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks").toString()); + configureFilteredSetting("xpack.ssl.keystore.path", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + } configureSecureSetting("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only"); configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.algorithm", "SunX509"); - configureFilteredSetting("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + configureFilteredSetting("xpack.ssl.cipher_suites", Strings.arrayToCommaDelimitedString(XPackSettings.DEFAULT_CIPHERS.toArray())); configureFilteredSetting("xpack.ssl.supported_protocols", randomFrom("TLSv1", "TLSv1.1", "TLSv1.2")); @@ -78,8 +81,10 @@ public class SettingsFilterTests extends ESTestCase { // client profile configureUnfilteredSetting("transport.profiles.client.port", "9500-9600"); - configureFilteredSetting("transport.profiles.client.xpack.security.ssl.keystore.path", + if (inFipsJvm() == false) { + configureFilteredSetting("transport.profiles.client.xpack.security.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + } configureFilteredSetting("transport.profiles.client.xpack.security.ssl.cipher_suites", 
Strings.arrayToCommaDelimitedString(XPackSettings.DEFAULT_CIPHERS.toArray())); configureFilteredSetting("transport.profiles.client.xpack.security.ssl.supported_protocols", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java index f9b1be65736..6966b7edf67 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java @@ -100,16 +100,18 @@ public class PkiRealmBootstrapCheckTests extends ESTestCase { public void testBootstrapCheckWithClosedSecuredSetting() throws Exception { final boolean expectFail = randomBoolean(); final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.http.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.authc.realms.test_pki.type", PkiRealmSettings.TYPE) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.client_authentication", expectFail ? "none" : "optional") - .put("xpack.security.http.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.security.authc.realms.test_pki.type", PkiRealmSettings.TYPE) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.client_authentication", expectFail ? 
"none" : "optional") + .put("xpack.security.http.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.security.http.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = TestEnvironment.newEnvironment(settings); final PkiRealmBootstrapCheck check = new PkiRealmBootstrapCheck(new SSLService(settings, env)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index 14b0a58419a..212ee7ea499 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -20,6 +20,7 @@ import org.junit.BeforeClass; import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.util.Arrays; import java.util.HashSet; import java.util.Set; @@ -92,8 +93,12 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { Settings.Builder builder = Settings.builder() .put("path.home", home) .put("path.conf", conf.toString()); - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + SecuritySettingsSource.addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); Settings settings = builder.build(); logger.error("--> retrieving users 
using URL: {}, home: {}", url, home); @@ -134,8 +139,11 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { String url = getHttpURL(); ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); Settings.Builder builder = Settings.builder().put("path.home", home); - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + SecuritySettingsSource.addSSLSettingsForPEMFiles(builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); Settings settings = builder.build(); logger.error("--> retrieving roles using URL: {}, home: {}", url, home); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java index 0d689adcdf5..9b8c3878a03 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java @@ -49,20 +49,23 @@ public class CommandLineHttpClientTests extends ESTestCase { } public void testCommandLineHttpClientCanExecuteAndReturnCorrectResultUsingSSLSettings() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); MockSecureSettings secureSettings = new MockSecureSettings(); Settings settings; if (randomBoolean()) { // with http ssl settings 
secureSettings.setString("xpack.security.http.ssl.truststore.secure_password", "testnode"); - settings = Settings.builder().put("xpack.security.http.ssl.truststore.path", resource.toString()) + settings = Settings.builder().put("xpack.security.http.ssl.certificate_authorities", certPath.toString()) .put("xpack.security.http.ssl.verification_mode", VerificationMode.CERTIFICATE).setSecureSettings(secureSettings) .build(); } else { // with global settings secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); - settings = Settings.builder().put("xpack.ssl.truststore.path", resource.toString()) - .put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE).setSecureSettings(secureSettings).build(); + settings = Settings.builder() + .put("xpack.ssl.certificate_authorities", certPath.toString()) + .put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE) + .setSecureSettings(secureSettings) + .build(); } CommandLineHttpClient client = new CommandLineHttpClient(settings, environment); HttpResponse httpResponse = client.execute("GET", new URL("https://localhost:" + webServer.getPort() + "/test"), "u1", @@ -74,11 +77,15 @@ public class CommandLineHttpClientTests extends ESTestCase { } private MockWebServer createMockWebServer() { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - Settings settings = - Settings.builder().put("xpack.ssl.keystore.path", resource.toString()).setSecureSettings(secureSettings).build(); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); + Settings settings = Settings.builder() + .put("xpack.ssl.key", 
keyPath.toString()) + .put("xpack.ssl.certificate", certPath.toString()) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); return new MockWebServer(sslService.sslContext(), false); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java index 26cd513ec78..23010e400a5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java @@ -34,8 +34,8 @@ public abstract class GroupsResolverTestCase extends ESTestCase { @Before public void setUpLdapConnection() throws Exception { - Path truststore = getDataPath(trustPath()); - this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), truststore); + Path trustPath = getDataPath(trustPath()); + this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), trustPath); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java index 8bdfd02d2fc..966f2e3f549 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java @@ -67,6 +67,6 @@ public class LdapTestUtils { sslConfiguration = sslService.getSSLConfiguration("xpack.security.authc.realms.foo.ssl"); } return LdapUtils.privilegedConnect(() -> new LDAPConnection(sslService.sslSocketFactory(sslConfiguration), options, - ldapurl.getHost(), ldapurl.getPort(), bindDN, bindPassword)); + 
ldapurl.getHost(), ldapurl.getPort(), bindDN, bindPassword)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index 9d8fd1544f5..19b0d4e71bb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -54,7 +54,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { @Before public void init() throws Exception { - Path keystore = getDataPath("support/ADtrust.jks"); + Path certPath = getDataPath("support/smb_ca.crt"); Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. 
@@ -63,10 +63,9 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { */ globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(newSecureSettings("xpack.ssl.truststore.secure_password", "changeit")) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", certPath) + .build(); sslService = new SSLService(globalSettings, env); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java index c458a9c42ea..931acc1e79d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java @@ -26,22 +26,24 @@ import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; -import java.io.InputStream; import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyStore; import java.security.SecureRandom; +import java.util.Arrays; +import java.util.Collections; +import 
java.util.List; import java.util.Locale; +import java.util.stream.Collectors; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -60,16 +62,16 @@ public class PkiAuthenticationTests extends SecuritySingleNodeTestCase { SSLClientAuth sslClientAuth = randomBoolean() ? SSLClientAuth.REQUIRED : SSLClientAuth.OPTIONAL; Settings.Builder builder = Settings.builder() - .put(super.nodeSettings()) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.client_authentication", sslClientAuth) - .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) - .put("xpack.security.authc.realms.file.order", "0") - .put("xpack.security.authc.realms.pki1.type", PkiRealmSettings.TYPE) - .put("xpack.security.authc.realms.pki1.order", "1") - .put("xpack.security.authc.realms.pki1.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); + .put(super.nodeSettings()) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.client_authentication", sslClientAuth) + .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) + .put("xpack.security.authc.realms.file.order", "0") + .put("xpack.security.authc.realms.pki1.type", PkiRealmSettings.TYPE) + .put("xpack.security.authc.realms.pki1.order", "1") + .put("xpack.security.authc.realms.pki1.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); SecuritySettingsSource.addSecureSettings(builder, secureSettings -> 
secureSettings.setString("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only")); @@ -90,7 +92,13 @@ public class PkiAuthenticationTests extends SecuritySingleNodeTestCase { public void testTransportClientCanAuthenticateViaPki() { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList + ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient client = createTransportClient(builder.build())) { client.addTransportAddress(randomFrom(node().injector().getInstance(Transport.class).boundAddress().boundAddresses())); IndexResponse response = client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); @@ -113,7 +121,11 @@ public class PkiAuthenticationTests extends SecuritySingleNodeTestCase { } public void testRestAuthenticationViaPki() throws Exception { - SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -124,7 +136,10 @@ public class PkiAuthenticationTests 
extends SecuritySingleNodeTestCase { } public void testRestAuthenticationFailure() throws Exception { - SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -135,21 +150,13 @@ public class PkiAuthenticationTests extends SecuritySingleNodeTestCase { } } - private SSLContext getRestSSLContext(String keystoreResourcePath, String password) throws Exception { + private SSLContext getRestSSLContext(String keyPath, String password, String certPath, List trustedCertPaths) throws Exception { SSLContext context = SSLContext.getInstance("TLS"); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - Path store = getDataPath(keystoreResourcePath); - KeyStore ks; - try (InputStream in = Files.newInputStream(store)) { - ks = KeyStore.getInstance("jks"); - ks.load(in, password.toCharArray()); - } - - kmf.init(ks, password.toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(ks); - context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); - + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(trustedCertPaths.stream().map(p -> getDataPath + (p)).collect(Collectors.toList()))); + KeyManager km = 
CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), password::toCharArray), password.toCharArray()); + context.init(new KeyManager[]{km}, new TrustManager[]{tm}, new SecureRandom()); return context; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java index 7f79ae35ada..5a7015a4e8d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java @@ -128,6 +128,8 @@ public class SamlAuthenticatorTests extends SamlTestCase { @BeforeClass public static void init() throws Exception { + assumeFalse("Can't run in a FIPS JVM, there is no DOM XMLSignature Factory so we can't sign XML documents", inFipsJvm()); + // TODO: Refactor the signing to use org.opensaml.xmlsec.signature.support.Signer so that we can run the tests SamlUtils.initialize(Loggers.getLogger(SamlAuthenticatorTests.class)); // Initialise Apache XML security so that the signDoc methods work correctly. 
Init.init(); @@ -218,7 +220,7 @@ public class SamlAuthenticatorTests extends SamlTestCase { "" + IDP_ENTITY_ID + "" + "" + - ""); + ""); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(token)); assertThat(exception.getMessage(), containsString("No assertions found in SAML response")); assertThat(exception.getCause(), nullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java index cf41673b86b..e239c8706b9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java @@ -296,6 +296,7 @@ public class SamlMetadataCommandTests extends SamlTestCase { } public void testSigningMetadataWithPfx() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm()); final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); final Path p12Path = getDataPath("saml.p12"); @@ -355,6 +356,7 @@ public class SamlMetadataCommandTests extends SamlTestCase { } public void testSigningMetadataWithPasswordProtectedPfx() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm()); final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); final Path p12Path = getDataPath("saml_with_password.p12"); @@ -393,11 +395,13 @@ public class SamlMetadataCommandTests extends SamlTestCase { public void testErrorSigningMetadataWithWrongPassword() throws Exception { final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); - final Path p12Path = getDataPath("saml_with_password.p12"); + final Path 
signingKeyPath = getDataPath("saml_with_password.key"); final SamlMetadataCommand command = new SamlMetadataCommand((e) -> randomFrom(keyStore, null)); final OptionSet options = command.getParser().parse(new String[]{ - "-signing-bundle", p12Path.toString(), - "-signing-key-password", "wrong_password" + "-signing-cert", certPath.toString(), + "-signing-key", signingKeyPath.toString(), + "-signing-key-password", "wrongpassword" + }); final boolean useSigningCredentials = randomBoolean(); @@ -422,7 +426,7 @@ public class SamlMetadataCommandTests extends SamlTestCase { final UserException userException = expectThrows(UserException.class, () -> command.possiblySignDescriptor(terminal, options, descriptor, env)); assertThat(userException.getMessage(), containsString("Unable to create metadata document")); - assertThat(terminal.getOutput(), containsString("keystore password was incorrect")); + assertThat(terminal.getOutput(), containsString("Error parsing Private Key from")); } public void testSigningMetadataWithPem() throws Exception { @@ -473,7 +477,7 @@ public class SamlMetadataCommandTests extends SamlTestCase { final OptionSet options = command.getParser().parse(new String[]{ "-signing-cert", certPath.toString(), "-signing-key", signingKeyPath.toString(), - "-signing-key-password", "saml" + "-signing-key-password", "saml" }); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java index 6dc9c021fc8..980abc46831 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java @@ -105,13 +105,17 @@ public class SamlRealmTests extends SamlTestCase { final Path path = getDataPath("idp1.xml"); final String body = new String(Files.readAllBytes(path), 
StandardCharsets.UTF_8); final MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + mockSecureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); final Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("path.home", createTempDir()) - .setSecureSettings(mockSecureSettings) - .build(); + .put("xpack.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("path.home", createTempDir()) + .setSecureSettings(mockSecureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, TestEnvironment.newEnvironment(settings)); try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext(Settings.EMPTY), false)) { proxyServer.start(); @@ -563,17 +567,21 @@ public class SamlRealmTests extends SamlTestCase { private Settings.Builder buildSettings(String idpMetaDataPath) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.keystore.secure_password", "testnode"); + secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.secure_key_passphrase", "testnode"); return Settings.builder() - .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate") - .put(REALM_SETTINGS_PREFIX + ".ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put(REALM_SETTINGS_PREFIX + ".type", "saml") - .put(REALM_SETTINGS_PREFIX + "." 
+ SamlRealmSettings.IDP_METADATA_PATH.getKey(), idpMetaDataPath) - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_ENTITY_ID.getKey(), TEST_IDP_ENTITY_ID) - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_HTTP_REFRESH.getKey(), METADATA_REFRESH + "ms") - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings); + .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate") + .put(REALM_SETTINGS_PREFIX + ".ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put(REALM_SETTINGS_PREFIX + ".ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put(REALM_SETTINGS_PREFIX + ".ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put(REALM_SETTINGS_PREFIX + ".type", "saml") + .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_PATH.getKey(), idpMetaDataPath) + .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_ENTITY_ID.getKey(), TEST_IDP_ENTITY_ID) + .put(REALM_SETTINGS_PREFIX + "." 
+ SamlRealmSettings.IDP_METADATA_HTTP_REFRESH.getKey(), METADATA_REFRESH + "ms") + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings); } private RealmConfig realmConfigFromRealmSettings(Settings realmSettings) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java index bac5e0b3f50..abd5768bebe 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java @@ -43,11 +43,10 @@ import java.util.Arrays; import java.util.Collection; import java.util.concurrent.CountDownLatch; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase { private static int randomClientPort; @@ -66,25 +65,18 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settingsBuilder = Settings.builder(); String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); - - Path store; - try { - store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); - assertThat(Files.exists(store), is(true)); - } catch (Exception e) { - throw new RuntimeException(e); - } - + Path certPath = 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); settingsBuilder.put(super.nodeSettings(nodeOrdinal)) - .put("transport.profiles.client.xpack.security.ssl.truststore.path", store) // settings for client truststore - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("transport.profiles.client.xpack.security.type", "client") - .put("transport.profiles.client.port", randomClientPortRange) - // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent - .put("transport.profiles.client.bind_host", "localhost") - .put("xpack.security.audit.enabled", false) - .put(XPackSettings.WATCHER_ENABLED.getKey(), false) - .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); + .putList("transport.profiles.client.xpack.security.ssl.certificate_authorities", + Arrays.asList(certPath.toString())) // settings for client truststore + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("transport.profiles.client.xpack.security.type", "client") + .put("transport.profiles.client.port", randomClientPortRange) + // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent + .put("transport.profiles.client.bind_host", "localhost") + .put("xpack.security.audit.enabled", false) + .put(XPackSettings.WATCHER_ENABLED.getKey(), false) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); if (randomBoolean()) { settingsBuilder.put("transport.profiles.default.xpack.security.type", "node"); // this is default lets set it randomly } @@ -120,7 +112,12 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase //.put("xpack.ml.autodetect_process", false); Collection> mockPlugins = Arrays.asList( LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class); - addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + 
nodeSettings, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) { node.start(); ensureStableCluster(cluster().size() + 1); @@ -159,7 +156,12 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase //.put("xpack.ml.autodetect_process", false); Collection> mockPlugins = Arrays.asList( LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class); - addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + nodeSettings, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) { node.start(); TransportService instance = node.injector().getInstance(TransportService.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java index f03a4255b7f..bc674ae1aa0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java @@ -21,7 +21,8 @@ import static org.hamcrest.CoreMatchers.is; // TODO delete this test? 
public class IPHostnameVerificationTests extends SecurityIntegTestCase { - Path keystore; + private Path certPath; + private Path keyPath; @Override protected boolean transportSSLEnabled() { @@ -46,36 +47,37 @@ public class IPHostnameVerificationTests extends SecurityIntegTestCase { .putList("discovery.zen.ping.unicast.hosts", newUnicastAddresses); try { - //This keystore uses a cert with a CN of "Elasticsearch Test Node" and IPv4+IPv6 ip addresses as SubjectAlternativeNames - keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.jks"); - assertThat(Files.exists(keystore), is(true)); + //Use a cert with a CN of "Elasticsearch Test Node" and IPv4+IPv6 ip addresses as SubjectAlternativeNames + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.crt"); + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem"); + assertThat(Files.exists(certPath), is(true)); } catch (Exception e) { throw new RuntimeException(e); } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-ip-only"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-ip-only"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-ip-only"); }); - return settingsBuilder.put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) // settings for client truststore - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) // settings for client truststore - .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") - .put("network.host", "127.0.0.1") - .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.ssl.verification_mode", "full") - .build(); + return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + 
.put("xpack.ssl.certificate_authorities", certPath.toAbsolutePath()) + .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") + .put("network.host", "127.0.0.1") + .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) + .put("xpack.ssl.verification_mode", "full") + .build(); } @Override protected Settings transportClientSettings() { Settings clientSettings = super.transportClientSettings(); return Settings.builder().put(clientSettings.filter(k -> k.startsWith("xpack.ssl.") == false)) - .put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.keystore.password", "testnode-ip-only") - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.password", "testnode-ip-only") - .build(); + .put("xpack.ssl.verification_mode", "certificate") + .put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .put("xpack.ssl.key_passphrase", "testnode-ip-only") + .put("xpack.ssl.certificate_authorities", certPath) + .build(); } public void testTransportClientConnectionWorksWithIPOnlyHostnameVerification() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index ad64dea79a5..23ca3c1fe9f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -39,17 +39,21 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { private SSLService sslService; private Environment env; - + private Path testnodeCert; + private Path testnodeKey; @Before public void 
createSSLService() throws Exception { - Path testNodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testNodeStore) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); } @@ -144,15 +148,11 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { } public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Exception { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .setSecureSettings(secureSettings) - .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.certificate_authorities", testnodeCert) + .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -163,13 +163,13 
@@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java index f87ab36d3d5..e9d91f5bd2d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java @@ -41,15 +41,17 @@ public class SecurityNetty4ServerTransportTests extends ESTestCase { @Before public void createSSLService() throws Exception { - Path testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.keystore.path", testnodeStore) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); } @@ -179,17 +181,18 @@ public class SecurityNetty4ServerTransportTests extends ESTestCase { public void testTransportSSLOverridesGlobalSSL() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "truststore-testnode-only"); + secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode"); Settings.Builder builder = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("xpack.security.transport.ssl.client_authentication", "none") - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.transport.ssl.key", + 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.security.transport.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.security.transport.ssl.client_authentication", "none") + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()); Settings settings = builder.build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java index 148453b5f84..c61b5782f75 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.security.LocalStateSecurity; import java.net.InetSocketAddress; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; @@ -35,25 +36,33 @@ public class SslHostnameVerificationTests extends SecurityIntegTestCase { Settings settings = super.nodeSettings(nodeOrdinal); Settings.Builder settingsBuilder = Settings.builder(); settingsBuilder.put(settings.filter(k -> k.startsWith("xpack.ssl.") == false), false); - Path keystore; + Path keyPath; + Path certPath; + Path nodeCertPath; try { /* * This keystore uses a cert without any subject alternative names and a CN of "Elasticsearch Test Node No SAN" * that will not resolve to a DNS name 
and will always cause hostname verification failures */ - keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); - assert keystore != null; - assertThat(Files.exists(keystore), is(true)); + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + assert keyPath != null; + assert certPath != null; + assert nodeCertPath != null; + assertThat(Files.exists(certPath), is(true)); + assertThat(Files.exists(nodeCertPath), is(true)); + assertThat(Files.exists(keyPath), is(true)); } catch (Exception e) { throw new RuntimeException(e); } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-no-subjaltname"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-no-subjaltname"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname"); }); - return settingsBuilder.put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) + return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())) // disable hostname verification as this test uses certs without a valid SAN or DNS in the CN .put("xpack.ssl.verification_mode", "certificate") .build(); @@ -61,22 +70,32 @@ public class SslHostnameVerificationTests extends SecurityIntegTestCase { @Override protected Settings transportClientSettings() { - Path keystore = 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); - assert keystore != null; + Path keyPath; + Path certPath; + Path nodeCertPath; + try { + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + assert keyPath != null; + assert certPath != null; + assert nodeCertPath != null; + assertThat(Files.exists(certPath), is(true)); + assertThat(Files.exists(nodeCertPath), is(true)); + assertThat(Files.exists(keyPath), is(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } Settings settings = super.transportClientSettings(); // remove all ssl settings Settings.Builder builder = Settings.builder(); builder.put(settings.filter( k -> k.startsWith("xpack.ssl.") == false), false); builder.put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) // settings for client keystore - .put("xpack.ssl.keystore.password", "testnode-no-subjaltname"); - - if (randomBoolean()) { - // randomly set the truststore, if not set the keystore should be used - builder.put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.password", "testnode-no-subjaltname"); - } + .put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.key_passphrase", "testnode-no-subjaltname") + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())); return builder.build(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java index b98e4e0ce57..e1e05032014 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java @@ -7,21 +7,21 @@ package org.elasticsearch.xpack.security.transport.nio; import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLException; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; import java.io.IOException; -import java.io.InputStream; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.security.KeyStore; import java.security.SecureRandom; import java.util.Arrays; +import java.util.Collections; import java.util.function.Supplier; public class SSLDriverTests extends ESTestCase { @@ -205,19 +205,16 @@ public class SSLDriverTests extends ESTestCase { } private SSLContext getSSLContext() throws Exception { - String relativePath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"; + String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; + String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; SSLContext sslContext; - try (InputStream in = Files.newInputStream(getDataPath(relativePath))) { - KeyStore keyStore = KeyStore.getInstance("jks"); - keyStore.load(in, "testclient".toCharArray()); - TrustManagerFactory tmf = 
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, "testclient".toCharArray()); - sslContext = SSLContext.getInstance("TLSv1.2"); - sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); - return sslContext; - } + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath)))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); + sslContext = SSLContext.getInstance("TLSv1.2"); + sslContext.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); + return sslContext; } private void normalClose(SSLDriver sendDriver, SSLDriver receiveDriver) throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 9f33da7ae88..feca093e581 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -55,15 +55,17 @@ import static org.hamcrest.Matchers.instanceOf; public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTestCase { private SSLService createSSLService() { - Path testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + Path testnodeCert = 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.keystore.path", testnodeStore) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); try { return new SSLService(settings, TestEnvironment.newEnvironment(settings)); } catch (Exception e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java index 42c5cd7c7ab..df49103a259 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.transport.ssl; -import com.unboundid.util.ssl.TrustAllTrustManager; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -77,7 +76,8 @@ public class EllipticCurveSSLTests extends SecurityIntegTestCase { X509ExtendedKeyManager x509ExtendedKeyManager = CertParsingUtils.keyManager(certs, privateKey, new char[0]); SSLContext 
sslContext = SSLContext.getInstance("TLS"); sslContext.init(new X509ExtendedKeyManager[] { x509ExtendedKeyManager }, - new TrustManager[] { new TrustAllTrustManager(false) }, new SecureRandom()); + new TrustManager[]{CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Collections.singletonList(certPath)))}, + new SecureRandom()); SSLSocketFactory socketFactory = sslContext.getSocketFactory(); NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().setTransport(true).get(); TransportAddress address = randomFrom(response.getNodes()).getTransport().getAddress().publishAddress(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java index fa8fd00aeba..541e6606912 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java @@ -42,12 +42,13 @@ import java.nio.charset.StandardCharsets; import java.security.KeyStore; import java.security.SecureRandom; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -97,6 +98,7 @@ public class SslIntegrationTests extends SecurityIntegTestCase { // no SSL exception as this is the exception is returned when connecting public void testThatTransportClientUsingSSLv3ProtocolIsRejected() { + assumeFalse("Can't run in a FIPS JVM as SSLv3 SSLContext 
not available", inFipsJvm()); try (TransportClient transportClient = new TestXPackTransportClient(Settings.builder() .put(transportClientSettings()) .put("node.name", "programmatic_transport_client") @@ -116,7 +118,11 @@ public class SslIntegrationTests extends SecurityIntegTestCase { public void testThatConnectionToHTTPWorks() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + addSSLSettingsForPEMFiles( + builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); SSLService service = new SSLService(builder.build(), null); CredentialsProvider provider = new BasicCredentialsProvider(); @@ -135,6 +141,7 @@ public class SslIntegrationTests extends SecurityIntegTestCase { } public void testThatHttpUsingSSLv3IsRejected() throws Exception { + assumeFalse("Can't run in a FIPS JVM as we can't even get an instance of SSL SSL Context", inFipsJvm()); SSLContext sslContext = SSLContext.getInstance("SSL"); TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); factory.init((KeyStore) null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java index 1d7ec67762b..d3ab5d092ab 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java @@ -20,10 +20,11 @@ import org.junit.BeforeClass; import java.net.InetAddress; import 
java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import java.util.Collections; import static org.elasticsearch.test.SecuritySettingsSource.TEST_USER_NAME; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; @@ -42,9 +43,9 @@ public class SslMultiPortTests extends SecurityIntegTestCase { /** * On each node sets up the following profiles: *
    - *
  • default: testnode keystore. Requires client auth
  • - *
  • client: testnode-client-profile keystore that only trusts the testclient cert. Requires client auth
  • - *
  • no_client_auth: testnode keystore. Does not require client auth
  • + *
  • default: testnode keypair. Requires client auth
  • + *
  • client: testnode-client-profile profile that only trusts the testclient cert. Requires client auth
  • + *
  • no_client_auth: testnode keypair. Does not require client auth
  • *
*/ @Override @@ -52,26 +53,25 @@ public class SslMultiPortTests extends SecurityIntegTestCase { String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); String randomNoClientAuthPortRange = randomNoClientAuthPort + "-" + (randomNoClientAuthPort+100); - Path store; + Path trustCert; try { - store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-client-profile.jks"); - assertThat(Files.exists(store), is(true)); + trustCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); + assertThat(Files.exists(trustCert), is(true)); } catch (Exception e) { throw new RuntimeException(e); } Settings settings = Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - // client set up here - .put("transport.profiles.client.port", randomClientPortRange) - // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent - .put("transport.profiles.client.bind_host", "localhost") - .put("transport.profiles.client.xpack.security.ssl.truststore.path", store.toAbsolutePath()) - .put("transport.profiles.client.xpack.security.ssl.truststore.password", "testnode-client-profile") - .put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) - .put("transport.profiles.no_client_auth.bind_host", "localhost") - .put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE) - .build(); + .put(super.nodeSettings(nodeOrdinal)) + // client set up here + .put("transport.profiles.client.port", randomClientPortRange) + // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent + .put("transport.profiles.client.bind_host", "localhost") + .put("transport.profiles.client.xpack.security.ssl.certificate_authorities", trustCert.toAbsolutePath()) + .put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) + .put("transport.profiles.no_client_auth.bind_host", "localhost") + 
.put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE) + .build(); logger.info("node {} settings:\n{}", nodeOrdinal, settings); return settings; } @@ -140,15 +140,18 @@ public class SslMultiPortTests extends SecurityIntegTestCase { } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. This test connects to the client profile, which is only * set to trust the testclient-client-profile certificate so the connection should always succeed */ public void testThatProfileTransportClientCanConnectToClientProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); assertGreenClusterState(transportClient); @@ -156,16 +159,19 @@ public class SslMultiPortTests extends SecurityIntegTestCase { } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. 
This test connects to the no_client_auth profile, which * uses a truststore that does not trust the testclient-client-profile certificate but does not require client * authentication */ public void testThatProfileTransportClientCanConnectToNoClientAuthProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("no_client_auth"))); @@ -174,16 +180,19 @@ public class SslMultiPortTests extends SecurityIntegTestCase { } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. 
This test connects to the default profile, which * uses a truststore that does not trust the testclient-client-profile certificate and requires client authentication * so the connection should always fail */ public void testThatProfileTransportClientCannotConnectToDefaultProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { TransportAddress transportAddress = randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses()); transportClient.addTransportAddress(transportAddress); @@ -253,19 +262,17 @@ public class SslMultiPortTests extends SecurityIntegTestCase { } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the no_client_auth profile, which uses - * the testnode certificate and does not require to present a certificate, so this connection should always succeed + * Uses a transport client that only trusts the testnode certificate. 
This test connects to the no_client_auth profile, + * which uses the testnode certificate and does not require to present a certificate, so this connection should always succeed */ public void testThatTransportClientWithOnlyTruststoreCanConnectToNoClientAuthProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), @@ -274,21 +281,19 @@ public class SslMultiPortTests extends SecurityIntegTestCase { } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the client profile, which uses + * Uses a transport client that only trusts the testnode certificate. 
This test connects to the client profile, which uses * the testnode certificate and requires the client to present a certificate, so this connection will never work as * the client has no certificate to present */ public void testThatTransportClientWithOnlyTruststoreCannotConnectToClientProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); @@ -300,21 +305,19 @@ public class SslMultiPortTests extends SecurityIntegTestCase { } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the default profile, which uses + * Uses a transport client that only trusts the testnode certificate. 
This test connects to the default profile, which uses * the testnode certificate and requires the client to present a certificate, so this connection will never work as * the client has no certificate to present */ public void testThatTransportClientWithOnlyTruststoreCannotConnectToDefaultProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); @@ -332,11 +335,11 @@ public class SslMultiPortTests extends SecurityIntegTestCase { */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToDefaultProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", 
true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); @@ -354,11 +357,11 @@ public class SslMultiPortTests extends SecurityIntegTestCase { */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToClientProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); @@ -376,11 +379,11 @@ public class SslMultiPortTests extends SecurityIntegTestCase { */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToNoClientAuthProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - 
.put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java index fb5d567bb36..7427c5a67e9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java @@ -10,12 +10,18 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.SecurityIntegTestCase; +import org.junit.BeforeClass; /** * An extremely simple test that shows SSL will work with a cipher that does not perform encryption */ public class SslNullCipherTests extends SecurityIntegTestCase { + @BeforeClass + public static void muteInFips() { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); + } + @Override public boolean transportSSLEnabled() { return true; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index fdb66916884..d205c7cd933 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -22,20 +22,22 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.security.SecurityField; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; import java.io.IOException; -import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.security.KeyStore; import java.security.SecureRandom; import java.security.cert.CertPathBuilderException; +import java.util.Arrays; +import java.util.Collections; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; @@ -74,7 +76,11 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { } catch (IOException e) { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); - assertThat(t.getMessage(), containsString("unable to find valid certification path to requested target")); + if (inFipsJvm()) { + assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + } else { + assertThat(t.getMessage(), containsString("unable to find valid certification path to requested target")); + } } } @@ -89,24 +95,27 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { } public void 
testThatTransportWorksWithoutSslClientAuth() throws IOException { - // specify an arbitrary keystore, that does not include the certs needed to connect to the transport protocol - Path store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks"); + // specify an arbitrary key and certificate - not the certs needed to connect to the transport protocol + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); + Path nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - if (Files.notExists(store)) { - throw new ElasticsearchException("store path doesn't exist"); + if (Files.notExists(keyPath) || Files.notExists(certPath)) { + throw new ElasticsearchException("key or certificate path doesn't exist"); } MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testclient-client-profile"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testclient-client-profile"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.ssl.keystore.path", store) - .setSecureSettings(secureSettings) - .put("cluster.name", internalCluster().getClusterName()) - .put(SecurityField.USER_SETTING.getKey(), - transportClientUsername() + ":" + new String(transportClientPassword().getChars())) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .put("xpack.ssl.certificate_authorities", nodeCertPath) + .setSecureSettings(secureSettings) + .put("cluster.name", 
internalCluster().getClusterName()) + .put(SecurityField.USER_SETTING.getKey(), transportClientUsername() + ":" + new String(transportClientPassword().getChars())) + .build(); try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { Transport transport = internalCluster().getDataNodeInstance(Transport.class); TransportAddress transportAddress = transport.boundAddress().publishAddress(); @@ -117,19 +126,19 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { } private SSLContext getSSLContext() { - try (InputStream in = - Files.newInputStream(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"))) { - KeyStore keyStore = KeyStore.getInstance("jks"); - keyStore.load(in, "testclient".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, "testclient".toCharArray()); + try { + String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; + String nodeCertPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; + String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Arrays.asList(getDataPath + (certPath), getDataPath(nodeCertPath)))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); SSLContext context = SSLContext.getInstance("TLSv1.2"); - context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + context.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); 
return context; } catch (Exception e) { - throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e); + throw new ElasticsearchException("failed to initialize SSLContext", e); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java index 4269d8a78eb..03f963cc59c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java @@ -12,9 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -23,16 +21,12 @@ import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.net.SocketException; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; -import java.security.KeyStore; -import java.security.cert.X509Certificate; -import java.util.Collections; +import java.util.Arrays; import java.util.concurrent.CountDownLatch; import static org.hamcrest.Matchers.containsString; @@ -43,34 +37,51 @@ import static org.hamcrest.Matchers.is; */ public class SSLReloadIntegTests extends SecurityIntegTestCase { - private Path nodeStorePath; + private Path nodeKeyPath; + private Path nodeCertPath; + private Path clientCertPath; + private Path updateableCertPath; 
@Override public Settings nodeSettings(int nodeOrdinal) { - //Node starts with testnode.jks - if (nodeStorePath == null) { - Path origPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); - Path tempDir = createTempDir(); - nodeStorePath = tempDir.resolve("testnode.jks"); - try { - Files.copy(origPath, nodeStorePath); - } catch (IOException e) { - throw new ElasticsearchException("failed to copy keystore"); + // Nodes start trusting testnode.crt and testclient.crt + Path origKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path origCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path origClientCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"); + Path tempDir = createTempDir(); + try { + if (nodeKeyPath == null) { + nodeKeyPath = tempDir.resolve("testnode.pem"); + Files.copy(origKeyPath, nodeKeyPath); } + if (nodeCertPath == null) { + nodeCertPath = tempDir.resolve("testnode.crt"); + Files.copy(origCertPath, nodeCertPath); + } + if (clientCertPath == null) { + clientCertPath = tempDir.resolve("testclient.crt"); + Files.copy(origClientCertPath, clientCertPath); + } + // Placeholder trusted certificate that will be updated later on + if (updateableCertPath == null) { + updateableCertPath = tempDir.resolve("updateable.crt"); + Files.copy(origCertPath, updateableCertPath); + } + } catch (IOException e) { + throw new ElasticsearchException("failed to copy key or certificate", e); } + Settings settings = super.nodeSettings(nodeOrdinal); Settings.Builder builder = Settings.builder() .put(settings.filter((s) -> s.startsWith("xpack.ssl.") == false)); - - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); - builder.put("resource.reload.interval.high", "1s") - 
.put("xpack.ssl.keystore.path", nodeStorePath); - - if (builder.get("xpack.ssl.truststore.path") != null) { - builder.put("xpack.ssl.truststore.path", nodeStorePath); - } + builder.put("path.home", createTempDir()) + .put("xpack.ssl.key", nodeKeyPath) + .put("xpack.ssl.key_passphrase", "testnode") + .put("xpack.ssl.certificate", nodeCertPath) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), + updateableCertPath.toString())) + .put("resource.reload.interval.high", "1s"); return builder.build(); } @@ -81,25 +92,27 @@ public class SSLReloadIntegTests extends SecurityIntegTestCase { } public void testThatSSLConfigurationReloadsOnModification() throws Exception { - Path keystorePath = createTempDir().resolve("testnode_updated.jks"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"), keystorePath); - X509Certificate certificate = CertParsingUtils.readX509Certificates(Collections.singletonList(getDataPath - ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt")))[0]; + Path keyPath = createTempDir().resolve("testnode_updated.pem"); + Path certPath = createTempDir().resolve("testnode_updated.crt"); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), certPath); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.keystore.path", keystorePath) - .put("xpack.ssl.truststore.path", nodeStorePath) - 
.setSecureSettings(secureSettings) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), + updateableCertPath.toString())) + .setSecureSettings(secureSettings) + .build(); String node = randomFrom(internalCluster().getNodeNames()); SSLService sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings)); SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl"); SSLSocketFactory sslSocketFactory = sslService.sslSocketFactory(sslConfiguration); TransportAddress address = internalCluster() - .getInstance(Transport.class, node).boundAddress().publishAddress(); + .getInstance(Transport.class, node).boundAddress().publishAddress(); + // Fails as our nodes do not trust testnode_updated.crt try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) { assertThat(socket.isConnected(), is(true)); socket.startHandshake(); @@ -107,19 +120,11 @@ public class SSLReloadIntegTests extends SecurityIntegTestCase { } catch (SSLHandshakeException | SocketException expected) { logger.trace("expected exception", expected); } - KeyStore nodeStore = KeyStore.getInstance("jks"); - try (InputStream in = Files.newInputStream(nodeStorePath)) { - nodeStore.load(in, "testnode".toCharArray()); - } - nodeStore.setCertificateEntry("newcert", certificate); - Path path = nodeStorePath.getParent().resolve("updated.jks"); - try (OutputStream out = Files.newOutputStream(path)) { - nodeStore.store(out, "testnode".toCharArray()); - } + // Copy testnode_updated.crt to the placeholder updateable.crt so that the nodes will start trusting it now try { - Files.move(path, nodeStorePath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE); + Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING, 
StandardCopyOption.ATOMIC_MOVE); } catch (AtomicMoveNotSupportedException e) { - Files.move(path, nodeStorePath, StandardCopyOption.REPLACE_EXISTING); + Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING); } CountDownLatch latch = new CountDownLatch(1); assertBusy(() -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index beebf928fcf..cf77ca975a4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -72,6 +72,7 @@ public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { @BeforeClass public static void setupCertificates() throws Exception { + assumeFalse("Can't run in a FIPS JVM, custom TrustManager implementations cannot be used.", inFipsJvm()); configPath = createTempDir(); Path caCertPath = PathUtils.get(SSLTrustRestrictionsTests.class.getResource ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/nodes/ca.crt").toURI()); diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt new file mode 100644 index 00000000000..2dbb06c49e6 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID1zCCAr+gAwIBAgIQWA24rVK7FopAgOHfEio/VjANBgkqhkiG9w0BAQsFADB+ +MRMwEQYKCZImiZPyLGQBGRYDY29tMR0wGwYKCZImiZPyLGQBGRYNZWxhc3RpY3Nl +YXJjaDEUMBIGCgmSJomT8ixkARkWBHRlc3QxEjAQBgoJkiaJk/IsZAEZFgJhZDEe +MBwGA1UEAxMVYWQtRUxBU1RJQ1NFQVJDSEFELUNBMB4XDTE0MDgyNzE2MjI0MloX +DTI5MDgyNzE2MzI0MlowfjETMBEGCgmSJomT8ixkARkWA2NvbTEdMBsGCgmSJomT 
+8ixkARkWDWVsYXN0aWNzZWFyY2gxFDASBgoJkiaJk/IsZAEZFgR0ZXN0MRIwEAYK +CZImiZPyLGQBGRYCYWQxHjAcBgNVBAMTFWFkLUVMQVNUSUNTRUFSQ0hBRC1DQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALNNZsDJ+lhsE/pCIkNlq6/F +xwv3PU2M+E1/SbWrLEtfbb1ATnn98DwxjpCj00wS0bt26/7zrhHKyX5LaxyS27ER +8bKpLSO4qcVWzDIQnVNk2XfBrYS/Og+6Pi/Lw/ylt/vE++kHWIJBc4O6i+pPByOM +oypM6bh71kTkpK8OTPqf+HiPp0qKhRah6XVtqTc+kOCOku2+wkELbCz8RNzF9ca6 +Uu3YxLi73pNdk0wDTmg6JVaUyVRpSkjJH4BAp9SVma6Rxy6tbh4e5P+8K8lY9ptM +TBzTsDS1EhNK/92xULfQbGT814Z294pF3ARMEJ89N+aegS++kz7CqjciZ1+bA6EC +AwEAAaNRME8wCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FIEKG0KdSVNknKcMZkbTlKo7N8MjMBAGCSsGAQQBgjcVAQQDAgEAMA0GCSqGSIb3 +DQEBCwUAA4IBAQBgbWBXPbEMTEsiVWzoxmTw1wJASBdPahx6CggutjGq3ASjby4p +nVCTwE4xdDEVyFGmeslSp9+23XjBuaiqVPtYw8P8hnG269J0q4cOF/VXOccRLeOw +HVDBv2a7xzgBSwc1KB50TLv07stcBmBYNu8anN6EwGksdgjb8IjRV6U3U+IvFNrI +rGifuIc/iRZD4Clhnpxw8tCsgcrcmz9CU7CN5RxKVEpZ6ou6ZjHO8l8H0t9zWrSI +PL+33iBGHNWlyU63N93XgJtxV1em1hHryLtTTtaVZJJ3R0OrLrUpG8SQ7zCUy62f +YtImFPClUMXY03yH+4DAhflueRvY/D1AKL12 +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt new file mode 100644 index 00000000000..be5a6d02645 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDmzCCAoOgAwIBAgIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0ODQ2WhcNMjEwMjE0MTc0ODQ2WjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQAP/hdsWdu3Ck/Zteosity +nmXJTCnkecBYSLtjgYh9rPDFppj9KdsZ7+5P9FvxLv/t4Yw81YI24TrHk0CnMrD/ +QBaXDiPGeT9b6T/gWWVm1zQj2/567gH2UaIkIffy7q09BI9ICXSKDBRXRMLgVR19 
+iiJkwWb3b5TVvaQI4M8sEmJIHXei2/cfEKVR5hBprtzeKkvg6o9DXx+nDv2ZEUZ7 +it5pEN5AjD5t0S3ymtlUU5lqnr8er6/Qcrua2EXxE1HyPEkpN/Cwl7tF1ICMdguf +vght5ql1/Pk43VmBMulI/6z5e+7GZ1+x79YA17gabtGJ+onB0zJxgDBj0tto7H8C +AwEAAaOBpDCBoTAdBgNVHQ4EFgQUZo2Y3maL2NoxbbkwRZiC37k6QMEwbwYDVR0j +BGgwZoAUZo2Y3maL2NoxbbkwRZiC37k6QMGhOKQ2MDQxMjAwBgNVBAMTKUVsYXN0 +aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2VuZXJhdGVkIENBghR3CycjGOBKd4c1 +UpPok0IKytDdPTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBf +mkc4bvUR5+We/2rRqCmP4LFnl/LxfbZ9/pUPRdcxuowuK7YfxN8i44VXGpJvLtec +izhA8gvlj6GbYB/GNlHMogqEORbrMlu2o5Cev4HE/pcWpoqtVaDJqI5Hq4763EmJ +p2dXGMmU04H4LtkcCEt3xQfLQ+QIP4Dl2yEsNd248BKSsscCGm9V3vgzFzbdgndo +zUWv9hQCaEsKNtqvnkTqDy2uFjnf+xNoXFr/bI94gvD9HlZHnIC+g0TL5jjtSfCH +gjeXhC2bBKFtlSt4ClIdZTXWievYs6YDRREfaOi4F0757A/gf+hT0fjZ+9WWnUeM +UuvUnl71CNRnJ5JlNKBA +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt new file mode 100644 index 00000000000..59ecbd22e8b --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIUMVGoHuyNTjTFaoRmqFELz75jzDEwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0OTExWhcNMjEwMjE0MTc0OTExWjARMQ8wDQYD +VQQDEwZzYW1iYTQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtGBwa +n+7JN2vweSUsYh4zPmh8RPIE+nEVjK1lx/rADUBY7UVjfTYC+MVKKiezZe7gYCNT +7JNKazPpgVI9e3ZFKw/UxomLqRuuvn5bTh+1tMs3afY5+GGzi7oPmEbBO3ceg0Hi +rNSTDa1rfroZnRYK8uIeSZacQnAW90plITI7rBBt9jq+W9albFbDybfDgNv+yS/C +rzIsofm4rbFC3SMRYfrT6HvwDhjOmmYKZci5x7tsn0T+3tSiR44Bw5/DgiN5kX3m +/kl9qg1eoYWbCUy1dKmQlb4Nb4uNcxrIugLB3zjBkfhMZ0OHoveKh/lJASTWik9k +xQ9rEYbpsRbuXpsHAgMBAAGjgcwwgckwHQYDVR0OBBYEFJOLa7UXKtLPibgKeFh7 
+Kq1+rS0/MG8GA1UdIwRoMGaAFGaNmN5mi9jaMW25MEWYgt+5OkDBoTikNjA0MTIw +MAYDVQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBD +QYIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wLAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD +ggEBAEHqT1WHkcF8DuOgyIBx7wKcUVQ5H1qYYlJ1xgMGrKFFZLUzouLcON7oadEu +HLIJ4Z3AKD3bqWpcls5XJ9MTECGR48tou67x9cXqTV7jR3Rh0H/VGwzwhR85vbpu +o8ielOPL8XAQOfnAFESJii5sfCU4ZwLg+3evmGZdKfhU6rqQtLimgG/Gm96vOJne +y0a/TZTWrfAarithkOHHXSSAhEI5SdW5SlZAytF4AmYqFvafwxe1+NyFwfCRy0Xl +H40WgVsq+z84psU+WyORb3THX5rgB4au9nuMXOqFKAtrJSI/uApncYraaqU28rqB +gYd8XrtjhKOLw+6viqAKu8l7/cs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index 9858a5cd118..a69445386d0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -44,12 +44,14 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest @Override protected Settings nodeSettings(int nodeOrdinal) { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("xpack.http.ssl.keystore.path", resource.toString()) - .put("xpack.http.ssl.keystore.password", "testnode") - .build(); + .put(super.nodeSettings(nodeOrdinal)) + .put("xpack.http.ssl.key", keyPath) + .put("xpack.http.ssl.certificate", certPath) + .put("xpack.http.ssl.keystore.password", "testnode") + 
.build(); } @Before diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index 10618b36e8a..03dcd794715 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -169,30 +169,31 @@ public class HttpClientTests extends ESTestCase { } public void testHttps() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.jks"); + Path trustedCertPath = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); Settings settings; if (randomBoolean()) { - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "truststore-testnode-only"); settings = Settings.builder() - .put("xpack.http.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.http.ssl.certificate_authorities", trustedCertPath) + .setSecureSettings(secureSettings) + .build(); } else { - secureSettings.setString("xpack.ssl.truststore.secure_password", "truststore-testnode-only"); settings = Settings.builder() - .put("xpack.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate_authorities", trustedCertPath) + .setSecureSettings(secureSettings) + .build(); } try (HttpClient client = new HttpClient(settings, authRegistry, new SSLService(settings, environment))) { secureSettings = new MockSecureSettings(); // We can't use the client 
created above for the server since it is only a truststore - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings2 = Settings.builder() - .put("xpack.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks")) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings2, environment); testSslMockWebserver(client, sslService.sslContext(), false); @@ -200,34 +201,40 @@ public class HttpClientTests extends ESTestCase { } public void testHttpsDisableHostnameVerification() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); Settings settings; if (randomBoolean()) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "testnode-no-subjaltname"); - settings = Settings.builder() - .put("xpack.http.ssl.truststore.path", resource.toString()) - .put("xpack.http.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)) - .setSecureSettings(secureSettings) - .build(); + Settings.Builder builder = Settings.builder() + .put("xpack.http.ssl.certificate_authorities", certPath); + if (inFipsJvm()) { + //Can't use TrustAllConfig in FIPS mode + builder.put("xpack.http.ssl.verification_mode", VerificationMode.CERTIFICATE); + } else { + builder.put("xpack.http.ssl.verification_mode", randomFrom(VerificationMode.NONE, 
VerificationMode.CERTIFICATE)); + } + settings = builder.build(); } else { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-no-subjaltname"); - settings = Settings.builder() - .put("xpack.ssl.truststore.path", resource.toString()) - .put("xpack.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)) - .setSecureSettings(secureSettings) - .build(); + Settings.Builder builder = Settings.builder() + .put("xpack.ssl.certificate_authorities", certPath); + if (inFipsJvm()) { + //Can't use TrustAllConfig in FIPS mode + builder.put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE); + } else { + builder.put("xpack.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)); + } + settings = builder.build(); } try (HttpClient client = new HttpClient(settings, authRegistry, new SSLService(settings, environment))) { MockSecureSettings secureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it only defines a truststore - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-no-subjaltname"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname"); Settings settings2 = Settings.builder() - .put("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks")) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings2, environment); testSslMockWebserver(client, sslService.sslContext(), false); @@ -235,13 +242,15 @@ public class HttpClientTests extends ESTestCase { } public void testHttpsClientAuth() throws Exception { - Path resource = 
getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); try (HttpClient client = new HttpClient(settings, authRegistry, sslService)) { @@ -365,30 +374,31 @@ public class HttpClientTests extends ESTestCase { } public void testProxyCanHaveDifferentSchemeThanRequest() throws Exception { + Path trustedCertPath = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); // this test fakes a proxy server that sends a response instead of forwarding it to the mock web server // on top of that the proxy request is HTTPS but the real request is HTTP only MockSecureSettings serverSecureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it is only a truststore - serverSecureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + serverSecureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings serverSettings = Settings.builder() - .put("xpack.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks")) - 
.setSecureSettings(serverSecureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(serverSecureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(serverSettings, environment); try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext(), false)) { proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent")); proxyServer.start(); - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "truststore-testnode-only"); Settings settings = Settings.builder() .put(HttpSettings.PROXY_HOST.getKey(), "localhost") .put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort()) .put(HttpSettings.PROXY_SCHEME.getKey(), "https") - .put("xpack.http.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) + .put("xpack.http.ssl.certificate_authorities", trustedCertPath) .build(); HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()) diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt new file mode 100644 index 00000000000..37e142afb23 --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIJAJ+K5mGS3n/AMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTQxMjE2MTcwNDQ1WhcNMTgxMjE1MTcwNDQ1 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC 
+AQ8AMIIBCgKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO +rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf +8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN +EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w +VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33 +1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABo1owWDAJBgNVHRMEAjAA +MB0GA1UdDgQWBBTwGg2LF8+mzsvBBWxJKv6VXv3dMTAsBgNVHREEJTAjgglsb2Nh +bGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAIwDQYJKoZIhvcNAQELBQADggEB +ABP4ufLToJhcUselVxV9LPD5VGPEHGLdIFqsUEix7DMsiNpR76X6a8qNQbZpdbd6 ++qPKqoaMgC7znX7qZtCqRbIXTWbudZPxFkcHdiWx3SiALMQYabeUGetClX3sCndU +SUoV8f34i8dJxfNcqhLcsh4zpgxtmwsvs5OLMTBvm0Xo2zUFUjlmrt41pBrWEuq9 +nkObc/cr6Syiz3sy4pYVJO1/YwHaZgE/URqjVlari70DR3ES4YnIUnLQajKx2Q0/ +gXVgzjbe68KPOUGCz6GYiWq+d4tcWdHzLv1GsaqQ1MD9P21ArfrX4DpzgPDrO6MP +9Ppq5DQGa2q4mz3kipd5RIs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem new file mode 100644 index 00000000000..5de9c813fa8 --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO +rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf +8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN +EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w +VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33 +1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABAoIBAQCWgv3A6VPC1DUV +u/1qFAobwwQqUfYXIbqgcwtQ/CAq+UzcXsGNOiavkUzrwF1oEz5qpHDHJCr9+iX7 +pBvgRNksTG+86NgYvbgc7vee0qbTCFPqXNQ6ySw3aWBgMwXMy/t4Z2dEffNAC+l4 +KjMR3UR2BKERhhItnBNd0J6Yxnh/+jg1Uf5fVMEh1/WOGLYCJnnn5oEHNKeon6XR +dobIn2QjD/PB8ZX7UubrSXmyezU0e9h3ARoI3oPMV6f8XQSa5K/KRrk8FUkVQ4vI 
+5+YAMjtY/K2I8xAEoPyprD/ILAVN+3E47J0K14EfKNTajSzQFVJhaaCvs7btxScA +Sx/zRsvxAoGBAP5KMH6vamdnBlZTPT2jtrsmzjyC0Z+9lbNokzRmVribps+DFdAW +YsGCbfApcbOYmpdLSeccFTA+uT5IbQ8hwBbWn/HKm+y8EDAPklf5tL0+w7pCZ4kU +50pKk6cjSTv/CDjO+hy4KIz2H/zXivXEV+4FtFKOZ3qUVg7m+1c/u5lDAoGBAM99 +L8/S9jwCkOjv+TKhmK+2/S5tVy1fdjlurTu9nI46CYa9MaOndZKY6EJ9ekBLIHUQ +h1QAsdPRHgkObuKDUHmpLr7qmoTRE7vtWC3sHK382j5CBEK00p+09wFHA03Bf40f +Jdjlzqe9F9jO6LH2RL/TECQDe7RJaTOQJrNlVtiXAoGBAOUUsNtv68t7ZJogIuuE +sPmo2+Jnd7EQeexGKVbrWvS0RHJtBRmRESaC+ceBjozczWe+y7UH946e8wLI/HbF +UOdCMpUAkbeTNIIXhR78NXbHNEx3xg4YZsTmql3HzBHgjueejnOQ8/cJQ4fkJauC +VjR3rxswbshfGagTLhpLsBVBAoGBAMBf5mN+ynRLQMXoMRlDgIhyVf2kvO5BkyCe +wBkirTOlFc4KPirpCXZ5NObo5d8UiKxhPcehuT6VpY5qBl8XtxaFyOSUKd24594W +qeox/0lFpaeRl9etRZdztoxFpgeCv1s9pN6b+2XESYboGBFgLs/XxiBN5nT6l4KK +RYeRDttTAoGAMoAreVa/i1l5YChhyskBTt+nePHGomsXC9tv7mZFLOrPQ+CLy5Xd +4PQqqYjRaJT/aP3N/q0NcauSKxYKmgnhInXpaasSVzGrM60DQLVw+SXfTiwXN0dH +V/bq2ybdSxEh2xQoyrfpiFDkCEecY0nYCL1Ff7UYY6g8P/Qj8DBiZGI= +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt new file mode 100644 index 00000000000..37e142afb23 --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIJAJ+K5mGS3n/AMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTQxMjE2MTcwNDQ1WhcNMTgxMjE1MTcwNDQ1 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO +rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf 
+8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN +EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w +VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33 +1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABo1owWDAJBgNVHRMEAjAA +MB0GA1UdDgQWBBTwGg2LF8+mzsvBBWxJKv6VXv3dMTAsBgNVHREEJTAjgglsb2Nh +bGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAIwDQYJKoZIhvcNAQELBQADggEB +ABP4ufLToJhcUselVxV9LPD5VGPEHGLdIFqsUEix7DMsiNpR76X6a8qNQbZpdbd6 ++qPKqoaMgC7znX7qZtCqRbIXTWbudZPxFkcHdiWx3SiALMQYabeUGetClX3sCndU +SUoV8f34i8dJxfNcqhLcsh4zpgxtmwsvs5OLMTBvm0Xo2zUFUjlmrt41pBrWEuq9 +nkObc/cr6Syiz3sy4pYVJO1/YwHaZgE/URqjVlari70DR3ES4YnIUnLQajKx2Q0/ +gXVgzjbe68KPOUGCz6GYiWq+d4tcWdHzLv1GsaqQ1MD9P21ArfrX4DpzgPDrO6MP +9Ppq5DQGa2q4mz3kipd5RIs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt new file mode 100644 index 00000000000..ced9d81d96f --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDTTCCAjWgAwIBAgIJALL7dwEsWamvMA0GCSqGSIb3DQEBCwUAME8xDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEnMCUGA1UEAxMeRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUgTm8gU0FOMB4XDTE0MTIxNjE5NTcyNloXDTE4MTIx +NTE5NTcyNlowTzEMMAoGA1UEChMDb3JnMRYwFAYDVQQLEw1lbGFzdGljc2VhcmNo +MScwJQYDVQQDEx5FbGFzdGljc2VhcmNoIFRlc3QgTm9kZSBObyBTQU4wggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCkIGS7A/V6TesR34ajMyNYL3tB1OjW +Raq4KtF8FfW1H6nHGrWa/qXjZWPirczy1k2n6ZL7YOCcv/YeY8xAqC9mGQxvEuqo +EaqXq2cjRdAs/7zqzRkdPPi3Jw/p/RHrDfOAzOsMnBGc0G2Hrsj//aP44vp85pek +fM3t2kNAYZWYCzXUqWAIUoxBDK4DcQdsN8H4KTMIwQEEiRtcKnL/b8QGKsyGLfLq +36ZABHZ4kY2SmcP3bWxZtbFN4hamdwoAtYe+lS0/ee8/fOTLyZ3Ey+X6EEmGO1lk 
+WR4XLli15k1L2HBzWGG7zwxVEC5r2h3Sx1njYh/Jq3khIdSvDbiMmM+VAgMBAAGj +LDAqMAkGA1UdEwQCMAAwHQYDVR0OBBYEFGm8wrYF9mJweJ1vloDw19e0PUuIMA0G +CSqGSIb3DQEBCwUAA4IBAQBbEZ73weDphNIcmvN25v6NIfjBebqgm0/2grDFwmZe +Z1DibzRoVfoQ7WeUqbPS7SHUQ+KzIN1GdfHXhW9r6mmLbtzPv90Q/8zBcNv5HNZZ +YK+T2r9hoAWEY6nB1fiOJ4udkFMYfAi6LiSxave4IPWp/WIqd0IWtPtkPl+MmG41 +TfRom8TnO+o+VsjgDkY5Q1JDsNQKy1BrtxzIZyz7d1zYKTQ+HXZ4yeYJoVoc3k4y +6w9eX2zAUZ6Z3d4an6CLr6Hew9Dj2VX1vqCj1a5/VvHZVyVxyh4hg8sHYm7tZOJX +wN3B5GcKwbbFjaMVBLaMlP62OdGg7tCh61evWm+l06S0 +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem new file mode 100644 index 00000000000..b0f7a585d7f --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,2F36F79E75ACA7803AF1BC1B70C2360C + +d4/f7dnpPW9DfhGXuGDx7r56BjQ64iNcsQdrC1CTZB363iAvBBeHaoJFaWpQOUmT +WCBdM6YmRuMi6z4sMtm8Vre3i+Mu2Jti0LTDXqH8Y6FMpptxAYoFb9hkM0OvUGDJ +ASLGTa1XKyCzZWOe2IGLtWIS8zkGv/fSXHqDJV8ddq8uLHGZXBxAye5oJUD0654T +DasIllbiTlBEYlEJ10OyIAspS47n4LWVE5ggyGdiyIgh5s3lAMPO3+yenDU3/dzj +YF2zHKnfe0Nx2lUQVJRYknQvFCx0WTGH9gNl40FovoOMHyRjEuOHD9PFNvL/TCh6 +9h0jJPWu/SsIiYaIqR0pDgqWdHXHA5Ea3s2+w0YHbv7DqkGXWZdceLUdZtcXCBJk +P8QL9IWa7VE50SS6wV5uXX9tV5lHzMFsNGkwtGcR1UMU5cXYqckFXgoBqDN0fyWe +V5iEknSJ4Na+MHv75rvRat0kv6upu9i5NSpYTc5jLHdWZWsYMZ/ZMiMoLBP+KAPT +DQ3eyph/84BU3DePaQF3Rsp0ZvPxqQ361Zwc4zC5CKbHur1QX8WAY5XlBMfuBpkf +CKr5wgwF+ZpS7zsfUpMPPe9Y1E8TWnhx/DtCVLEslBpr2u/rMaxPp6ev9/Wry7N+ +UFBOwodipBhlfSvLqjc511L+bXRzqXiINuW0eSKUQv0J/G0Ur894kJJ6feDYlskj +JiZjOgOyyKhB+K9AXmkfRdvWUJeweL8pgDuYSyQdQ0zoUCZALEoYK2cBWzti/wep +QPFD5oz8076aXNHKMHLsRmSFuEZb6IN0PtUNVf958EbrtABNIuoufKlKtJsEnUyK 
+VHIEUxExEgyECiozKnxvhr7RQ9nTQXhNdgtec6jJblYnla/+OWAfHdxtHhBjp5AX +WvLyUhmgrmLNdKd1KSzcXynBHgh0hi0HJXYx31FilwbxsdhwN1LwN/Do4T4qGkUr +InrQC3ZHRuh0yAOPrwRFEWbW5m/PzVP/xYVgFtVWP7w38ftZbaBy5xPmtswn+PH+ +cIMt1Y9PaAlhLNpW/Vfn503T9M+05vu73vbU1xgu/B1kePOqE/WO0cOZl0KdaMmT +wAQBKuI7qTACH+/8g3Uir1YSChLthH+1Gs6h686EP6ZydwXq9GYXXkNmJNJJsnmU +RDjoT0F4XBKvcQdX3EeQYs3Af2yZWFDC59c1Ews2dqMK7vy2tYITbx2yn30DBDAl +xvjp2izzmAgQJEG/RqCYsUHCCEv7wz3tpsSOkFem9IHZpR2h8Rqy88GH9qYOkgwo ++fKSmIgC4RLQXsHuh7RRuyNc2FaWDgRgSxs5V4f9xOSU/ZbUftYWnwEyCwbu3RJp +CIXQFZhzU2t5l1Eh+x40rwpEJDXBEwmOIUO3x1oOqGZPPEQ674uMal5TRjvdOVGD +h665Fpo5Xu9EQwQZHYddeRl/7yw8F6LCxBLgHlngKRHHGDUHlTscLfYRqNh+x3jT +3S8dfaGzlnwdQEx32gyLAV0/nsFnzh1AknFMT8jesIYF7PLiAi67PNyNwRCc7TFp +jpKvzkDRVP72bivTmCyP5aKR0Q2oIrAw51MMinT6R2VaoR7COjoVbqYsRLwkxu+p +-----END RSA PRIVATE KEY----- diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 24976ab6113..5c0399a1d9f 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -17,6 +17,7 @@ task openLdapFixture { String outputDir = "${project.buildDir}/generated-resources/${project.name}" task copyIdpTrust(type: Copy) { from idpFixtureProject.file('src/main/resources/certs/idptrust.jks'); + from idpFixtureProject.file('src/main/resources/certs/ca.crt'); into outputDir } if (project.rootProject.vagrantSupported) { diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java index 1c9d93873a4..c7a92dccab8 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java @@ -34,7 +34,6 @@ import java.util.List; 
import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.test.OpenLdapTests.LDAPTRUST_PATH; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -45,22 +44,20 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { private Settings globalSettings; private ThreadPool threadPool; - private MockSecureSettings globalSecureSettings; + private static final String LDAPCACERT_PATH = "/ca.crt"; @Before public void init() throws Exception { - Path keystore = getDataPath(LDAPTRUST_PATH); + Path caPath = getDataPath(LDAPCACERT_PATH); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use a SSLContext, previously connected sessions can get re-established which breaks hostname * verification tests since a re-established connection does not perform hostname verification. */ - globalSecureSettings = newSecureSettings("xpack.ssl.truststore.secure_password", "changeit"); globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(globalSecureSettings) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", caPath) + .build(); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } @@ -94,7 +91,6 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { .put(globalSettings, false); builder.put(Settings.builder().put(config.settings(), false).normalizePrefix("xpack.security.authc.realms.oldap-test.").build()); final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.merge(globalSecureSettings); if (useSecureBindPassword) { secureSettings.setString("xpack.security.authc.realms.oldap-test.secure_bind_password", OpenLdapTests.PASSWORD); } diff --git 
a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java index f277f5d84b3..b55431dee1b 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java @@ -177,4 +177,4 @@ public class SearchGroupsResolverTests extends GroupsResolverTestCase { protected String trustPath() { return "/idptrust.jks"; } -} \ No newline at end of file +} diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java index e11b62642eb..dde0b7645df 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CertificateInfor import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.Name; import org.hamcrest.Matchers; import org.junit.After; +import org.junit.BeforeClass; import javax.security.auth.x500.X500Principal; @@ -96,6 +97,11 @@ public class CertificateGenerateToolTests extends ESTestCase { return tempDir; } + @BeforeClass + public static void checkFipsJvm() { + assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm()); + } + @After public void tearDown() throws Exception { IOUtils.close(jimfs); diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java 
b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java index 795dd074a80..706d5dbab5f 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.core.ssl.CertificateTool.GenerateCertificateComma import org.elasticsearch.xpack.core.ssl.CertificateTool.Name; import org.hamcrest.Matchers; import org.junit.After; +import org.junit.BeforeClass; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; @@ -108,6 +109,11 @@ public class CertificateToolTests extends ESTestCase { return tempDir; } + @BeforeClass + public static void chechFipsJvm() { + assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm()); + } + @After public void tearDown() throws Exception { IOUtils.close(jimfs); diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java index 98594917129..32452a609e2 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java @@ -40,7 +40,7 @@ public class ADLdapUserSearchSessionFactoryTests extends AbstractActiveDirectory @Before public void init() throws Exception { - Path keystore = getDataPath("support/ADtrust.jks"); + Path certPath = getDataPath("support/smb_ca.crt"); Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); /* * Prior to 
each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. @@ -49,10 +49,9 @@ public class ADLdapUserSearchSessionFactoryTests extends AbstractActiveDirectory */ globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(newSecureSettings("xpack.ssl.truststore.secure_password", "changeit")) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", certPath) + .build(); sslService = new SSLService(globalSettings, env); threadPool = new TestThreadPool("ADLdapUserSearchSessionFactoryTests"); } diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java index 7ef1bd674a3..829e87c849d 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java @@ -23,9 +23,16 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.VerificationMode; import org.junit.Before; +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.List; public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { @@ -48,11 +55,25 @@ public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { protected SSLService sslService; protected Settings globalSettings; 
protected boolean useGlobalSSL; + protected List certificatePaths; @Before public void initializeSslSocketFactory() throws Exception { useGlobalSSL = randomBoolean(); - Path truststore = getDataPath("../ldap/support/ADtrust.jks"); + // We use certificates in PEM format and `ssl.certificate_authorities` instead of ssl.trustore + // so that these tests can also run in a FIPS JVM where JKS keystores can't be used. + certificatePaths = new ArrayList<>(); + Files.walkFileTree(getDataPath + ("../ldap/support"), new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + String fileName = file.getFileName().toString(); + if (fileName.endsWith(".crt")) { + certificatePaths.add(getDataPath("../ldap/support/" + fileName).toString()); + } + return FileVisitResult.CONTINUE; + } + }); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use a SSLContext, previously connected sessions can get re-established which breaks hostname @@ -60,20 +81,16 @@ public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { */ Settings.Builder builder = Settings.builder().put("path.home", createTempDir()); if (useGlobalSSL) { - builder.put("xpack.ssl.truststore.path", truststore) - .put("xpack.ssl.truststore.password", "changeit"); + builder.putList("xpack.ssl.certificate_authorities", certificatePaths); // fake realm to load config with certificate verification mode - builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.bar.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } else { // fake realms so ssl will get loaded - 
builder.put("xpack.security.authc.realms.foo.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.foo.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.foo.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.foo.ssl.verification_mode", VerificationMode.FULL); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.bar.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } globalSettings = builder.build(); @@ -99,8 +116,7 @@ public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { builder.put(SessionFactorySettings.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); } if (useGlobalSSL == false) { - builder.put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit"); + builder.putList("ssl.certificate_authorities", certificatePaths); } return builder.build(); } diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 11da59e44d6..d35e7ab0b0c 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; @@ -100,7 +99,8 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase ) }; - protected static final String TESTNODE_KEYSTORE = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"; + protected static final String TESTNODE_KEY = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"; + protected static final String TESTNODE_CERT = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; protected static RealmConfig realmConfig; protected static List roleMappings; protected static boolean useGlobalSSL; @@ -122,7 +122,8 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase @Override protected Settings nodeSettings(int nodeOrdinal) { final RealmConfig realm = AbstractAdLdapRealmTestCase.realmConfig; - Path store = getDataPath(TESTNODE_KEYSTORE); + final Path nodeCert = getDataPath(TESTNODE_CERT); + final Path nodeKey = getDataPath(TESTNODE_KEY); Settings.Builder builder = Settings.builder(); // don't use filter since it returns a prefixed secure setting instead of mock! 
Settings settingsToAdd = super.nodeSettings(nodeOrdinal); @@ -156,14 +157,15 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase } } } - addSslSettingsForStore(builder, store, "testnode"); - builder.put(buildRealmSettings(realm, roleMappings, store)); + addSslSettingsForKeyPair(builder, nodeKey, "testnode", nodeCert, getNodeTrustedCertificates()); + builder.put(buildRealmSettings(realm, roleMappings, getNodeTrustedCertificates())); return builder.build(); } - protected Settings buildRealmSettings(RealmConfig realm, List roleMappingEntries, Path store) { + protected Settings buildRealmSettings(RealmConfig realm, List roleMappingEntries, List + certificateAuthorities) { Settings.Builder builder = Settings.builder(); - builder.put(realm.buildSettings(store, "testnode")); + builder.put(realm.buildSettings(certificateAuthorities)); configureFileRoleMappings(builder, roleMappingEntries); return builder.build(); } @@ -216,10 +218,11 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase @Override protected Settings transportClientSettings() { if (useGlobalSSL) { - Path store = getDataPath(TESTNODE_KEYSTORE); + Path key = getDataPath(TESTNODE_KEY); + Path cert = getDataPath(TESTNODE_CERT); Settings.Builder builder = Settings.builder() .put(super.transportClientSettings().filter((s) -> s.startsWith("xpack.ssl.") == false)); - addSslSettingsForStore(builder, store, "testnode"); + addSslSettingsForKeyPair(builder, key, "testnode", cert, getNodeTrustedCertificates()); return builder.build(); } else { return super.transportClientSettings(); @@ -304,14 +307,35 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase return UsernamePasswordToken.basicAuthHeaderValue(username, new SecureString(password.toCharArray())); } - private void addSslSettingsForStore(Settings.Builder builder, Path store, String password) { - SecuritySettingsSource.addSecureSettings(builder, secureSettings -> { - 
secureSettings.setString("xpack.ssl.keystore.secure_password", password); - secureSettings.setString("xpack.ssl.truststore.secure_password", password); - }); - builder.put("xpack.ssl.keystore.path", store) - .put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.truststore.path", store); + private void addSslSettingsForKeyPair(Settings.Builder builder, Path key, String keyPassphrase, Path cert, + List certificateAuthorities) { + builder.put("xpack.ssl.key", key) + .put("xpack.ssl.key_passphrase", keyPassphrase) + .put("xpack.ssl.verification_mode", "certificate") + .put("xpack.ssl.certificate", cert) + .putList("xpack.ssl.certificate_authorities", certificateAuthorities); + } + + /** + * Collects all the certificates that are normally trusted by the node ( contained in testnode.jks ) + * + * @return + */ + List getNodeTrustedCertificates() { + Path testnodeCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeClientProfileCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-client-profile.crt"); + Path activedirCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt"); + Path testclientCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"); + Path openldapCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/openldap.crt"); + Path samba4Cert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt"); + return Arrays.asList(testnodeCert.toString(), testnodeClientProfileCert.toString(), activedirCert.toString(), testclientCert + .toString(), openldapCert.toString(), samba4Cert.toString()); } static class RoleMappingEntry { @@ -429,19 +453,19 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase this.mapGroupsAsRoles = randomBoolean(); } - public Settings buildSettings(Path 
store, String password) { - return buildSettings(store, password, 1); + public Settings buildSettings(List certificateAuthorities) { + return buildSettings(certificateAuthorities, 1); } - protected Settings buildSettings(Path store, String password, int order) { + + protected Settings buildSettings(List certificateAuthorities, int order) { Settings.Builder builder = Settings.builder() - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".order", order) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".hostname_verification", false) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".unmapped_groups_as_roles", mapGroupsAsRoles) - .put(this.settings); + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".order", order) + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".hostname_verification", false) + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".unmapped_groups_as_roles", mapGroupsAsRoles) + .put(this.settings); if (useGlobalSSL == false) { - builder.put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.truststore.path", store) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.truststore.password", password); + builder.putList(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.certificate_authorities", certificateAuthorities); } return builder.build(); diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java index fb7ea6c5dd7..330ec6b9a75 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java @@ -132,4 +132,4 @@ public class ActiveDirectoryGroupsResolverTests extends GroupsResolverTestCase { protected String trustPath() { return 
"/org/elasticsearch/xpack/security/authc/ldap/support/ADtrust.jks"; } -} \ No newline at end of file +} diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 8f85c250f48..614d6659f2d 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -280,10 +280,9 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT .build(); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -317,10 +316,9 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT ignoreReferralErrors); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -346,10 +344,9 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT Settings 
settings = LdapTestCase.buildLdapSettings(new String[] { AD_LDAP_URL }, userTemplate, false); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -408,8 +405,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } if (useGlobalSSL == false) { - builder.put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit"); + builder.putList("ssl.certificate_authorities", certificatePaths); } if (useBindUser) { diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java index c4e07a846fd..3d1cdb202d1 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.settings.Settings; import org.junit.BeforeClass; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -46,9 +45,9 @@ public class MultipleAdRealmIT extends AbstractAdLdapRealmTestCase { Settings.Builder builder = Settings.builder(); builder.put(super.nodeSettings(nodeOrdinal)); - Path store = getDataPath(TESTNODE_KEYSTORE); final List secondaryRoleMappings = secondaryRealmConfig.selectRoleMappings(() -> 
true); - final Settings secondarySettings = super.buildRealmSettings(secondaryRealmConfig, secondaryRoleMappings, store); + final Settings secondarySettings = super.buildRealmSettings(secondaryRealmConfig, secondaryRoleMappings, + getNodeTrustedCertificates()); secondarySettings.keySet().forEach(name -> { String newName = name.replace(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL, XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + "2"); builder.copy(newName, name, secondarySettings); From 049966a829918a61cf3ac181e709183c9ef686e8 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 17 Jul 2018 11:26:28 +0200 Subject: [PATCH 056/260] Check that client methods match API defined in the REST spec (#31825) We have been encountering name mismatches between API defined in our REST spec and method names that have been added to the high-level REST client. We should check this automatically to prevent furher mismatches, and correct all the current ones. This commit adds a test for this and corrects the issues found by it. 
--- .../gradle/test/RestIntegTestTask.groovy | 3 +- client/rest-high-level/build.gradle | 10 +- .../elasticsearch/client/IndicesClient.java | 35 +++- .../elasticsearch/client/IngestClient.java | 8 +- .../client/RestHighLevelClient.java | 102 ++++++++++- .../elasticsearch/client/SnapshotClient.java | 6 +- .../elasticsearch/client/BulkProcessorIT.java | 10 +- .../client/BulkProcessorRetryIT.java | 2 +- .../java/org/elasticsearch/client/CrudIT.java | 4 +- .../CustomRestHighLevelClientTests.java | 2 +- .../elasticsearch/client/IndicesClientIT.java | 6 +- .../elasticsearch/client/IngestClientIT.java | 2 +- .../client/RestHighLevelClientTests.java | 161 +++++++++++++++++- .../org/elasticsearch/client/SearchIT.java | 24 +-- .../org/elasticsearch/client/SnapshotIT.java | 14 +- .../documentation/CRUDDocumentationIT.java | 14 +- .../IndicesClientDocumentationIT.java | 10 +- .../IngestClientDocumentationIT.java | 4 +- .../documentation/SearchDocumentationIT.java | 20 +-- .../SnapshotClientDocumentationIT.java | 4 +- ...rossClusterSearchUnavailableClusterIT.java | 4 +- 21 files changed, 359 insertions(+), 86 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index f2e6dc8e561..d2101c48aab 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -24,7 +24,6 @@ import org.elasticsearch.gradle.VersionProperties import org.gradle.api.DefaultTask import org.gradle.api.Project import org.gradle.api.Task -import org.gradle.api.Transformer import org.gradle.api.execution.TaskExecutionAdapter import org.gradle.api.internal.tasks.options.Option import org.gradle.api.provider.Property @@ -217,7 +216,7 @@ public class RestIntegTestTask extends DefaultTask { * @param project The project to add the copy task to * @param 
includePackagedTests true if the packaged tests should be copied, false otherwise */ - private static Task createCopyRestSpecTask(Project project, Provider includePackagedTests) { + static Task createCopyRestSpecTask(Project project, Provider includePackagedTests) { project.configurations { restSpec } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 451452759f5..2fed806e98c 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -18,8 +18,8 @@ */ import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.XmlProvider -import org.gradle.api.publish.maven.MavenPublication +import org.elasticsearch.gradle.test.RestIntegTestTask +import org.gradle.api.internal.provider.Providers buildscript { repositories { @@ -41,6 +41,10 @@ apply plugin: 'com.github.johnrengelman.shadow' group = 'org.elasticsearch.client' archivesBaseName = 'elasticsearch-rest-high-level-client' +//we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions) +Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE) +test.dependsOn(copyRestSpec) + publishing { publications { nebula(MavenPublication) { @@ -102,6 +106,8 @@ dependencies { testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + //this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs + testCompile "org.elasticsearch:rest-api-spec:${version}" } dependencyLicenses { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 2944b49bf18..250bbd520da 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -174,7 +174,7 @@ public final class IndicesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException { + public GetMappingsResponse getMapping(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options, GetMappingsResponse::fromXContent, emptySet()); } @@ -187,8 +187,8 @@ public final class IndicesClient { * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void getMappingsAsync(GetMappingsRequest getMappingsRequest, RequestOptions options, - ActionListener listener) { + public void getMappingAsync(GetMappingsRequest getMappingsRequest, RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options, GetMappingsResponse::fromXContent, listener, emptySet()); } @@ -474,8 +474,23 @@ public final class IndicesClient { * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #forcemerge(ForceMergeRequest, RequestOptions)} instead */ + @Deprecated public ForceMergeResponse forceMerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException { + return forcemerge(forceMergeRequest, options); + } + + /** + * Force merge one or more indices using the Force Merge API. 
+ * See + * Force Merge API on elastic.co + * @param forceMergeRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public ForceMergeResponse forcemerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options, ForceMergeResponse::fromXContent, emptySet()); } @@ -487,8 +502,22 @@ public final class IndicesClient { * @param forceMergeRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #forcemergeAsync(ForceMergeRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public void forceMergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener listener) { + forcemergeAsync(forceMergeRequest, options, listener); + } + + /** + * Asynchronously force merge one or more indices using the Force Merge API. + * See + * Force Merge API on elastic.co + * @param forceMergeRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void forcemergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options, ForceMergeResponse::fromXContent, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index 340e1465397..e889ec5beba 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -139,7 +139,7 @@ public final class IngestClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request, RequestOptions options) throws IOException { + public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options, SimulatePipelineResponse::fromXContent, emptySet()); } @@ -154,9 +154,9 @@ public final class IngestClient { * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void simulatePipelineAsync(SimulatePipelineRequest request, - RequestOptions options, - ActionListener listener) { + public void simulateAsync(SimulatePipelineRequest request, + RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options, SimulatePipelineResponse::fromXContent, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index b9e41b87932..c71bebf6903 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -384,8 +384,23 @@ public class RestHighLevelClient implements Closeable { * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #mget(MultiGetRequest, RequestOptions)} instead */ + @Deprecated public final MultiGetResponse multiGet(MultiGetRequest multiGetRequest, RequestOptions options) throws IOException { + return mget(multiGetRequest, options); + } + + + /** + * Retrieves multiple documents by id using the Multi Get API. + * See Multi Get API on elastic.co + * @param multiGetRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final MultiGetResponse mget(MultiGetRequest multiGetRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(multiGetRequest, RequestConverters::multiGet, options, MultiGetResponse::fromXContent, singleton(404)); } @@ -396,8 +411,21 @@ public class RestHighLevelClient implements Closeable { * @param multiGetRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #mgetAsync(MultiGetRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public final void multiGetAsync(MultiGetRequest multiGetRequest, RequestOptions options, ActionListener listener) { + mgetAsync(multiGetRequest, options, listener); + } + + /** + * Asynchronously retrieves multiple documents by id using the Multi Get API. + * See Multi Get API on elastic.co + * @param multiGetRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void mgetAsync(MultiGetRequest multiGetRequest, RequestOptions options, ActionListener listener) { performRequestAsyncAndParseEntity(multiGetRequest, RequestConverters::multiGet, options, MultiGetResponse::fromXContent, listener, singleton(404)); } @@ -531,8 +559,23 @@ public class RestHighLevelClient implements Closeable { * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #msearch(MultiSearchRequest, RequestOptions)} instead */ + @Deprecated public final MultiSearchResponse multiSearch(MultiSearchRequest multiSearchRequest, RequestOptions options) throws IOException { + return msearch(multiSearchRequest, options); + } + + /** + * Executes a multi search using the msearch API. + * See Multi search API on + * elastic.co + * @param multiSearchRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final MultiSearchResponse msearch(MultiSearchRequest multiSearchRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(multiSearchRequest, RequestConverters::multiSearch, options, MultiSearchResponse::fromXContext, emptySet()); } @@ -544,9 +587,24 @@ public class RestHighLevelClient implements Closeable { * @param searchRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #msearchAsync(MultiSearchRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public final void multiSearchAsync(MultiSearchRequest searchRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { + msearchAsync(searchRequest, options, listener); + } + + /** + * Asynchronously executes a multi search using the msearch API. + * See Multi search API on + * elastic.co + * @param searchRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void msearchAsync(MultiSearchRequest searchRequest, RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity(searchRequest, RequestConverters::multiSearch, options, MultiSearchResponse::fromXContext, listener, emptySet()); } @@ -559,8 +617,23 @@ public class RestHighLevelClient implements Closeable { * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #scroll(SearchScrollRequest, RequestOptions)} instead */ + @Deprecated public final SearchResponse searchScroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException { + return scroll(searchScrollRequest, options); + } + + /** + * Executes a search using the Search Scroll API. + * See Search Scroll + * API on elastic.co + * @param searchScrollRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final SearchResponse scroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(searchScrollRequest, RequestConverters::searchScroll, options, SearchResponse::fromXContent, emptySet()); } @@ -572,9 +645,24 @@ public class RestHighLevelClient implements Closeable { * @param searchScrollRequest the request * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #scrollAsync(SearchScrollRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public final void searchScrollAsync(SearchScrollRequest searchScrollRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { + scrollAsync(searchScrollRequest, options, listener); + } + + /** + * Asynchronously executes a search using the Search Scroll API. + * See Search Scroll + * API on elastic.co + * @param searchScrollRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void scrollAsync(SearchScrollRequest searchScrollRequest, RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity(searchScrollRequest, RequestConverters::searchScroll, options, SearchResponse::fromXContent, listener, emptySet()); } @@ -691,8 +779,8 @@ public class RestHighLevelClient implements Closeable { * See Multi Search Template API * on elastic.co. */ - public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest, - RequestOptions options) throws IOException { + public final MultiSearchTemplateResponse msearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest, + RequestOptions options) throws IOException { return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, options, MultiSearchTemplateResponse::fromXContext, emptySet()); } @@ -703,9 +791,9 @@ public class RestHighLevelClient implements Closeable { * See Multi Search Template API * on elastic.co. 
*/ - public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest, - RequestOptions options, - ActionListener listener) { + public final void msearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest, + RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, options, MultiSearchTemplateResponse::fromXContext, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index f75f6cdef24..ae115839bae 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -63,7 +63,7 @@ public final class SnapshotClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public GetRepositoriesResponse getRepositories(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options) + public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options, GetRepositoriesResponse::fromXContent, emptySet()); @@ -78,8 +78,8 @@ public final class SnapshotClient { * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void getRepositoriesAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options, - ActionListener listener) { + public void getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options, GetRepositoriesResponse::fromXContent, listener, emptySet()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index 7605b1c715c..fdd5634ddd6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -79,7 +79,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } } @@ -105,7 +105,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } } @@ -157,7 +157,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { 
assertThat(ids.add(bulkItemResponse.getId()), equalTo(true)); } - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } public void testBulkProcessorWaitOnClose() throws Exception { @@ -188,7 +188,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { } assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception { @@ -265,7 +265,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { } } - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), testDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), testDocs); } private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java index c20998eeb58..5fd9fcb661c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java @@ -129,7 +129,7 @@ public class BulkProcessorRetryIT extends ESRestHighLevelClientTestCase { } highLevelClient().indices().refresh(new RefreshRequest(), RequestOptions.DEFAULT); - int multiGetResponsesCount = highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT).getResponses().length; + int multiGetResponsesCount = 
highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT).getResponses().length; if (rejectedExecutionExpected) { assertThat(multiGetResponsesCount, lessThanOrEqualTo(numberOfAsyncOps)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 9de4c22611c..89f357477fa 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -253,7 +253,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); multiGetRequest.add("index", "type", "id2"); - MultiGetResponse response = execute(multiGetRequest, highLevelClient()::multiGet, highLevelClient()::multiGetAsync); + MultiGetResponse response = execute(multiGetRequest, highLevelClient()::mget, highLevelClient()::mgetAsync); assertEquals(2, response.getResponses().length); assertTrue(response.getResponses()[0].isFailed()); @@ -285,7 +285,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); multiGetRequest.add("index", "type", "id2"); - MultiGetResponse response = execute(multiGetRequest, highLevelClient()::multiGet, highLevelClient()::multiGetAsync); + MultiGetResponse response = execute(multiGetRequest, highLevelClient()::mget, highLevelClient()::mgetAsync); assertEquals(2, response.getResponses().length); assertFalse(response.getResponses()[0].isFailed()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java index 3d1db23da16..ff27fe21c27 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -121,7 +121,7 @@ public class CustomRestHighLevelClientTests extends ESTestCase { * so that they can be used by subclasses to implement custom logic. */ @SuppressForbidden(reason = "We're forced to uses Class#getDeclaredMethods() here because this test checks protected methods") - public void testMethodsVisibility() throws ClassNotFoundException { + public void testMethodsVisibility() { final String[] methodNames = new String[]{"parseEntity", "parseResponseException", "performRequest", diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 88cf445d436..36a45999b51 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -443,7 +443,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { .types("_doc"); GetMappingsResponse getMappingsResponse = - execute(request, highLevelClient().indices()::getMappings, highLevelClient().indices()::getMappingsAsync); + execute(request, highLevelClient().indices()::getMapping, highLevelClient().indices()::getMappingAsync); Map mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap(); Map type = new HashMap<>(); @@ -796,7 +796,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { createIndex(index, settings); ForceMergeRequest forceMergeRequest = new ForceMergeRequest(index); ForceMergeResponse forceMergeResponse = - execute(forceMergeRequest, highLevelClient().indices()::forceMerge, highLevelClient().indices()::forceMergeAsync); + execute(forceMergeRequest, highLevelClient().indices()::forcemerge, 
highLevelClient().indices()::forcemergeAsync); assertThat(forceMergeResponse.getTotalShards(), equalTo(1)); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(1)); assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); @@ -807,7 +807,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { assertFalse(indexExists(nonExistentIndex)); ForceMergeRequest forceMergeRequest = new ForceMergeRequest(nonExistentIndex); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> execute(forceMergeRequest, highLevelClient().indices()::forceMerge, highLevelClient().indices()::forceMergeAsync)); + () -> execute(forceMergeRequest, highLevelClient().indices()::forcemerge, highLevelClient().indices()::forcemergeAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index 6fd6f950595..1f5914f392c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -135,7 +135,7 @@ public class IngestClientIT extends ESRestHighLevelClientTestCase { ); request.setVerbose(isVerbose); SimulatePipelineResponse response = - execute(request, highLevelClient().ingest()::simulatePipeline, highLevelClient().ingest()::simulatePipelineAsync); + execute(request, highLevelClient().ingest()::simulate, highLevelClient().ingest()::simulateAsync); List results = response.getResults(); assertEquals(1, results.size()); if (isVerbose) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 2925062e0e7..47870125aa2 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.client; import com.fasterxml.jackson.core.JsonParseException; - -import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -53,6 +51,7 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -73,20 +72,30 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregation import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import org.junit.Before; import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.SocketTimeoutException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.client.RestClientTestUtil.randomHeaders; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static 
org.hamcrest.CoreMatchers.endsWith; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; @@ -137,7 +146,6 @@ public class RestHighLevelClientTests extends ESTestCase { } public void testInfo() throws IOException { - Header[] headers = randomHeaders(random(), "Header"); MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid", Build.CURRENT); mockResponse(testInfo); @@ -150,7 +158,7 @@ public class RestHighLevelClientTests extends ESTestCase { null, false, false, null, 1), randomAlphaOfLengthBetween(5, 10), 5, 5, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); mockResponse(mockSearchResponse); - SearchResponse searchResponse = restHighLevelClient.searchScroll( + SearchResponse searchResponse = restHighLevelClient.scroll( new SearchScrollRequest(randomAlphaOfLengthBetween(5, 10)), RequestOptions.DEFAULT); assertEquals(mockSearchResponse.getScrollId(), searchResponse.getScrollId()); assertEquals(0, searchResponse.getHits().totalHits); @@ -632,6 +640,149 @@ public class RestHighLevelClientTests extends ESTestCase { assertTrue(names.contains(DiscountedCumulativeGain.NAME)); } + public void testApiNamingConventions() throws Exception { + //this list should be empty once the high-level client is feature complete + String[] notYetSupportedApi = new String[]{ + "cluster.remote_info", + "count", + "create", + "delete_by_query", + "exists_source", + "get_source", + "indices.delete_alias", + "indices.delete_template", + "indices.exists_template", + "indices.exists_type", + "indices.get_upgrade", + "indices.put_alias", + "mtermvectors", + "put_script", + "reindex", + "reindex_rethrottle", + "render_search_template", + "scripts_painless_execute", + "snapshot.restore", + "tasks.get", + "termvectors", + "update_by_query" + }; + //These API are not required for high-level 
client feature completeness + String[] notRequiredApi = new String[] { + "cluster.allocation_explain", + "cluster.pending_tasks", + "cluster.reroute", + "cluster.state", + "cluster.stats", + "indices.shard_stores", + "indices.upgrade", + "indices.recovery", + "indices.segments", + "indices.stats", + "ingest.processor_grok", + "nodes.info", + "nodes.stats", + "nodes.hot_threads", + "nodes.usage", + "search_shards", + }; + Set deprecatedMethods = new HashSet<>(); + deprecatedMethods.add("indices.force_merge"); + deprecatedMethods.add("multi_get"); + deprecatedMethods.add("multi_search"); + deprecatedMethods.add("search_scroll"); + + ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load("/rest-api-spec/api"); + Set apiSpec = restSpec.getApis().stream().map(ClientYamlSuiteRestApi::getName).collect(Collectors.toSet()); + + Set topLevelMethodsExclusions = new HashSet<>(); + topLevelMethodsExclusions.add("getLowLevelClient"); + topLevelMethodsExclusions.add("close"); + + Map methods = Arrays.stream(RestHighLevelClient.class.getMethods()) + .filter(method -> method.getDeclaringClass().equals(RestHighLevelClient.class) + && topLevelMethodsExclusions.contains(method.getName()) == false) + .map(method -> Tuple.tuple(toSnakeCase(method.getName()), method)) + .flatMap(tuple -> tuple.v2().getReturnType().getName().endsWith("Client") + ? 
getSubClientMethods(tuple.v1(), tuple.v2().getReturnType()) : Stream.of(tuple)) + .collect(Collectors.toMap(Tuple::v1, Tuple::v2)); + + Set apiNotFound = new HashSet<>(); + + for (Map.Entry entry : methods.entrySet()) { + Method method = entry.getValue(); + String apiName = entry.getKey(); + + assertTrue("method [" + apiName + "] is not final", + Modifier.isFinal(method.getClass().getModifiers()) || Modifier.isFinal(method.getModifiers())); + assertTrue(Modifier.isPublic(method.getModifiers())); + + //we convert all the method names to snake case, hence we need to look for the '_async' suffix rather than 'Async' + if (apiName.endsWith("_async")) { + assertTrue("async method [" + method.getName() + "] doesn't have corresponding sync method", + methods.containsKey(apiName.substring(0, apiName.length() - 6))); + assertThat(method.getReturnType(), equalTo(Void.TYPE)); + assertEquals(0, method.getExceptionTypes().length); + assertEquals(3, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); + assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName())); + } else { + //A few methods return a boolean rather than a response object + if (apiName.equals("ping") || apiName.contains("exist")) { + assertThat(method.getReturnType().getSimpleName(), equalTo("boolean")); + } else { + assertThat(method.getReturnType().getSimpleName(), endsWith("Response")); + } + + assertEquals(1, method.getExceptionTypes().length); + //a few methods don't accept a request object as argument + if (apiName.equals("ping") || apiName.equals("info")) { + assertEquals(1, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName())); + } else { + assertEquals(apiName, 2, method.getParameterTypes().length); + 
assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); + assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + } + + boolean remove = apiSpec.remove(apiName); + if (remove == false && deprecatedMethods.contains(apiName) == false) { + //TODO xpack api are currently ignored, we need to load xpack yaml spec too + if (apiName.startsWith("xpack.") == false) { + apiNotFound.add(apiName); + } + } + } + } + assertThat("Some client method doesn't match a corresponding API defined in the REST spec: " + apiNotFound, + apiNotFound.size(), equalTo(0)); + + //we decided not to support cat API in the high-level REST client, they are supposed to be used from a low-level client + apiSpec.removeIf(api -> api.startsWith("cat.")); + Stream.concat(Arrays.stream(notYetSupportedApi), Arrays.stream(notRequiredApi)).forEach( + api -> assertTrue(api + " API is either not defined in the spec or already supported by the high-level client", + apiSpec.remove(api))); + assertThat("Some API are not supported but they should be: " + apiSpec, apiSpec.size(), equalTo(0)); + } + + private static Stream> getSubClientMethods(String namespace, Class clientClass) { + return Arrays.stream(clientClass.getMethods()).filter(method -> method.getDeclaringClass().equals(clientClass)) + .map(method -> Tuple.tuple(namespace + "." 
+ toSnakeCase(method.getName()), method)); + } + + private static String toSnakeCase(String camelCase) { + StringBuilder snakeCaseString = new StringBuilder(); + for (Character aChar : camelCase.toCharArray()) { + if (Character.isUpperCase(aChar)) { + snakeCaseString.append('_'); + snakeCaseString.append(Character.toLowerCase(aChar)); + } else { + snakeCaseString.append(aChar); + } + } + return snakeCaseString.toString(); + } + private static class TrackingActionListener implements ActionListener { private final AtomicInteger statusCode = new AtomicInteger(-1); private final AtomicReference exception = new AtomicReference<>(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index ce9091a91ff..9c9c5425f00 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -597,7 +597,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + highLevelClient()::scroll, highLevelClient()::scrollAsync); assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); @@ -606,7 +606,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + highLevelClient()::scroll, highLevelClient()::scrollAsync); assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); @@ -623,7 +623,7 @@ public class SearchIT 
extends ESRestHighLevelClientTestCase { SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)); ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(scrollRequest, - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync)); + highLevelClient()::scroll, highLevelClient()::scrollAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertThat(exception.getRootCause(), instanceOf(ElasticsearchException.class)); ElasticsearchException rootCause = (ElasticsearchException) exception.getRootCause(); @@ -644,7 +644,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -686,7 +686,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -734,7 +734,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, 
highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -759,7 +759,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { searchRequest1.source().highlighter(new HighlightBuilder().field("field")); searchRequest2.source().highlighter(new HighlightBuilder().field("field")); searchRequest3.source().highlighter(new HighlightBuilder().field("field")); - multiSearchResponse = execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + multiSearchResponse = execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -797,7 +797,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { multiSearchRequest.add(searchRequest2); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(2)); @@ -941,8 +941,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase { multiSearchTemplateRequest.add(badRequest); MultiSearchTemplateResponse multiSearchTemplateResponse = - execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, - highLevelClient()::multiSearchTemplateAsync); + execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, + highLevelClient()::msearchTemplateAsync); Item[] responses = multiSearchTemplateResponse.getResponses(); @@ -999,8 +999,8 @@ public class SearchIT extends 
ESRestHighLevelClientTestCase { // The whole HTTP request should fail if no nested search requests are valid ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, - () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, - highLevelClient()::multiSearchTemplateAsync)); + () -> execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, + highLevelClient()::msearchTemplateAsync)); assertEquals(RestStatus.BAD_REQUEST, exception.status()); assertThat(exception.getMessage(), containsString("no requests added")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 6d035f5db65..5483f055c2c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -77,8 +77,8 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { GetRepositoriesRequest request = new GetRepositoriesRequest(); request.repositories(new String[]{testRepository}); - GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(1, equalTo(response.repositories().size())); } @@ -86,8 +86,8 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { assertTrue(createTestRepository("other", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); assertTrue(createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); - GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + 
GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(2, equalTo(response.repositories().size())); } @@ -95,7 +95,7 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { String repository = "doesnotexist"; GetRepositoriesRequest request = new GetRepositoriesRequest(new String[]{repository}); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(request, - highLevelClient().snapshot()::getRepositories, highLevelClient().snapshot()::getRepositoriesAsync)); + highLevelClient().snapshot()::getRepository, highLevelClient().snapshot()::getRepositoryAsync)); assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(exception.getMessage(), equalTo( @@ -107,8 +107,8 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase { assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); GetRepositoriesRequest request = new GetRepositoriesRequest(); - GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(1, equalTo(response.repositories().size())); DeleteRepositoryRequest deleteRequest = new DeleteRepositoryRequest(repository); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 9dad115643c..ad41c139ddc 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1121,7 +1121,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // end::multi-get-request-top-level-extras // tag::multi-get-execute - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); // end::multi-get-execute // tag::multi-get-response @@ -1174,7 +1174,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { listener = new LatchedActionListener<>(listener, latch); // tag::multi-get-execute-async - client.multiGetAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.mgetAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::multi-get-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1185,7 +1185,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)); // <1> // end::multi-get-request-no-source - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertNull(item.getResponse().getSource()); } { @@ -1198,7 +1198,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(fetchSourceContext)); // <1> // end::multi-get-request-source-include - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertThat(item.getResponse().getSource(), hasEntry("foo", "val1")); assertThat(item.getResponse().getSource(), hasEntry("bar", "val2")); 
assertThat(item.getResponse().getSource(), not(hasKey("baz"))); @@ -1213,7 +1213,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(fetchSourceContext)); // <1> // end::multi-get-request-source-exclude - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertThat(item.getResponse().getSource(), not(hasKey("foo"))); assertThat(item.getResponse().getSource(), not(hasKey("bar"))); assertThat(item.getResponse().getSource(), hasEntry("baz", "val3")); @@ -1223,7 +1223,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::multi-get-request-stored request.add(new MultiGetRequest.Item("index", "type", "example_id") .storedFields("foo")); // <1> - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); MultiGetItemResponse item = response.getResponses()[0]; String value = item.getResponse().getField("foo").getValue(); // <2> // end::multi-get-request-stored @@ -1235,7 +1235,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { MultiGetRequest request = new MultiGetRequest(); request.add(new MultiGetRequest.Item("index", "type", "example_id") .version(1000L)); - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); MultiGetItemResponse item = response.getResponses()[0]; assertNull(item.getResponse()); // <1> Exception e = item.getFailure().getFailure(); // <2> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 23dab5b21e2..36d562c501f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -622,7 +622,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::get-mapping-request-indicesOptions // tag::get-mapping-execute - GetMappingsResponse getMappingResponse = client.indices().getMappings(request, RequestOptions.DEFAULT); + GetMappingsResponse getMappingResponse = client.indices().getMapping(request, RequestOptions.DEFAULT); // end::get-mapping-execute // tag::get-mapping-response @@ -704,7 +704,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase }); // tag::get-mapping-execute-async - client.indices().getMappingsAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.indices().getMappingAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-mapping-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1344,7 +1344,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::force-merge-request-flush // tag::force-merge-execute - ForceMergeResponse forceMergeResponse = client.indices().forceMerge(request, RequestOptions.DEFAULT); + ForceMergeResponse forceMergeResponse = client.indices().forcemerge(request, RequestOptions.DEFAULT); // end::force-merge-execute // tag::force-merge-response @@ -1369,14 +1369,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::force-merge-execute-listener // tag::force-merge-execute-async - client.indices().forceMergeAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.indices().forcemergeAsync(request, RequestOptions.DEFAULT, listener); // <1> // 
end::force-merge-execute-async } { // tag::force-merge-notfound try { ForceMergeRequest request = new ForceMergeRequest("does_not_exist"); - client.indices().forceMerge(request, RequestOptions.DEFAULT); + client.indices().forcemerge(request, RequestOptions.DEFAULT); } catch (ElasticsearchException exception) { if (exception.status() == RestStatus.NOT_FOUND) { // <1> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index c53ec2b5d7c..98502e3668a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -317,7 +317,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { // end::simulate-pipeline-request-verbose // tag::simulate-pipeline-execute - SimulatePipelineResponse response = client.ingest().simulatePipeline(request, RequestOptions.DEFAULT); // <1> + SimulatePipelineResponse response = client.ingest().simulate(request, RequestOptions.DEFAULT); // <1> // end::simulate-pipeline-execute // tag::simulate-pipeline-response @@ -381,7 +381,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { listener = new LatchedActionListener<>(listener, latch); // tag::simulate-pipeline-execute-async - client.ingest().simulatePipelineAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.ingest().simulateAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::simulate-pipeline-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 26bb4682fd9..c60f2d4c92b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -583,7 +583,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { // tag::search-scroll2 SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); // <1> scrollRequest.scroll(TimeValue.timeValueSeconds(30)); - SearchResponse searchScrollResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + SearchResponse searchScrollResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); scrollId = searchScrollResponse.getScrollId(); // <2> hits = searchScrollResponse.getHits(); // <3> assertEquals(3, hits.getTotalHits()); @@ -612,7 +612,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { // end::scroll-request-arguments // tag::search-scroll-execute-sync - SearchResponse searchResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + SearchResponse searchResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); // end::search-scroll-execute-sync assertEquals(0, searchResponse.getFailedShards()); @@ -638,7 +638,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { scrollListener = new LatchedActionListener<>(scrollListener, latch); // tag::search-scroll-execute-async - client.searchScrollAsync(scrollRequest, RequestOptions.DEFAULT, scrollListener); // <1> + client.scrollAsync(scrollRequest, RequestOptions.DEFAULT, scrollListener); // <1> // end::search-scroll-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -710,7 +710,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { while (searchHits != null && searchHits.length > 0) { // <2> 
SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); // <3> scrollRequest.scroll(scroll); - searchResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + searchResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); scrollId = searchResponse.getScrollId(); searchHits = searchResponse.getHits().getHits(); // <4> @@ -861,7 +861,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { // end::multi-search-template-request-inline // tag::multi-search-template-request-sync - MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + MultiSearchTemplateResponse multiResponse = client.msearchTemplate(multiRequest, RequestOptions.DEFAULT); // end::multi-search-template-request-sync // tag::multi-search-template-response @@ -916,7 +916,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { // tag::multi-search-template-execute - MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + MultiSearchTemplateResponse multiResponse = client.msearchTemplate(multiRequest, RequestOptions.DEFAULT); // end::multi-search-template-execute assertNotNull(multiResponse); @@ -944,7 +944,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { listener = new LatchedActionListener<>(listener, latch); // tag::multi-search-template-execute-async - client.multiSearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); + client.msearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); // end::multi-search-template-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1201,7 +1201,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { request.add(secondSearchRequest); // end::multi-search-request-basic // tag::multi-search-execute - MultiSearchResponse response = client.multiSearch(request, RequestOptions.DEFAULT); + 
MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); // end::multi-search-execute // tag::multi-search-response MultiSearchResponse.Item firstResponse = response.getResponses()[0]; // <1> @@ -1233,7 +1233,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { listener = new LatchedActionListener<>(listener, latch); // tag::multi-search-execute-async - client.multiSearchAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.msearchAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::multi-search-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1244,7 +1244,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { request.add(new SearchRequest("posts") // <1> .types("doc")); // <2> // end::multi-search-request-index - MultiSearchResponse response = client.multiSearch(request, RequestOptions.DEFAULT); + MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); MultiSearchResponse.Item firstResponse = response.getResponses()[0]; assertNull(firstResponse.getFailure()); SearchResponse searchResponse = firstResponse.getResponse(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 68a8113af6d..fff3e7ece70 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -221,7 +221,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase // end::get-repository-request-masterTimeout // tag::get-repository-execute - GetRepositoriesResponse response = client.snapshot().getRepositories(request, RequestOptions.DEFAULT); + GetRepositoriesResponse 
response = client.snapshot().getRepository(request, RequestOptions.DEFAULT); // end::get-repository-execute // tag::get-repository-response @@ -256,7 +256,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase listener = new LatchedActionListener<>(listener, latch); // tag::get-repository-execute-async - client.snapshot().getRepositoriesAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.snapshot().getRepositoryAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-repository-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 29aec900cef..6bfa4de8d4a 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -171,7 +171,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase { assertEquals(10, response.getHits().totalHits); assertEquals(10, response.getHits().getHits().length); String scrollId = response.getScrollId(); - SearchResponse scrollResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); + SearchResponse scrollResponse = restHighLevelClient.scroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); assertSame(SearchResponse.Clusters.EMPTY, scrollResponse.getClusters()); assertEquals(10, scrollResponse.getHits().totalHits); assertEquals(0, scrollResponse.getHits().getHits().length); @@ -206,7 +206,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase { assertEquals(10, response.getHits().totalHits); assertEquals(10, response.getHits().getHits().length); String 
scrollId = response.getScrollId(); - SearchResponse scrollResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); + SearchResponse scrollResponse = restHighLevelClient.scroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); assertSame(SearchResponse.Clusters.EMPTY, scrollResponse.getClusters()); assertEquals(10, scrollResponse.getHits().totalHits); assertEquals(0, scrollResponse.getHits().getHits().length); From b655c11dbee65d5331819f04a15ace80b639cfbd Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 17 Jul 2018 10:34:45 +0100 Subject: [PATCH 057/260] =?UTF-8?q?Mute=20:qa:mixed-cluster=20indices.stat?= =?UTF-8?q?s/10=5Findex/Index=20-=20all=E2=80=99?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- qa/mixed-cluster/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index ac57d51def7..db743cb12b1 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -57,6 +57,7 @@ for (Version version : bwcVersions.wireCompatible) { tasks.getByName("${baseName}#mixedClusterTestRunner").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") + systemProperty 'tests.rest.blacklist', ['indices.stats/10_index/Index - all'].join(',') } } From b43fe560a43ee5c43bae5105b07aae811a69b1ef Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Tue, 17 Jul 2018 11:41:31 +0000 Subject: [PATCH 058/260] Updates the build to gradle 4.9 (#32087) There are fixes to the dependency report, most importantly for us, it still works even if `failOnVersionConflict` would fail the build. 
--- build.gradle | 2 +- .../carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy | 2 +- gradle/wrapper/gradle-wrapper.properties | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index a75d093664f..187e2477052 100644 --- a/build.gradle +++ b/build.gradle @@ -596,7 +596,7 @@ if (System.properties.get("build.compare") != null) { } } sourceBuild { - gradleVersion = "4.8.1" // does not default to gradle weapper of project dir, but current version + gradleVersion = gradle.getGradleVersion() projectDir = referenceProject tasks = ["clean", "assemble"] arguments = ["-Dbuild.compare_friendly=true"] diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy index 24b66efbcef..d4c8f89bf50 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy @@ -74,7 +74,7 @@ class RandomizedTestingPlugin implements Plugin { // since we can't be sure if the task was ever realized, we remove both the provider and the task TaskProvider oldTestProvider try { - oldTestProvider = tasks.getByNameLater(Test, 'test') + oldTestProvider = tasks.named('test') } catch (UnknownTaskException unused) { // no test task, ok, user will use testing task on their own return diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 37e3d3699fa..94161917d18 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.8.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-4.9-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists 
-distributionSha256Sum=ce1645ff129d11aad62dab70d63426fdce6cfd646fa309dc5dc5255dd03c7c11 +distributionSha256Sum=39e2d5803bbd5eaf6c8efe07067b0e5a00235e8c71318642b2ed262920b27721 From a7e477126f5b9c18decf24314cacd2ad5456b2d5 Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 17 Jul 2018 13:11:10 +0100 Subject: [PATCH 059/260] Relax TermVectors API to work with textual fields other than TextFieldType (#31915) This changes the field-eligibility test to check one level up in the class hierarchy to allow any subclasses of StringFieldType. Closes #31902 --- .../index/termvectors/TermVectorsService.java | 5 +- .../termvectors/TermVectorsServiceTests.java | 54 ++++++++++++++++++- 2 files changed, 54 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index c13c56beb5a..bc77626b942 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -48,7 +48,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.search.dfs.AggregatedDfs; @@ -162,8 +162,7 @@ public class TermVectorsService { private static boolean isValidField(MappedFieldType fieldType) { // must be a string - if (fieldType instanceof KeywordFieldMapper.KeywordFieldType == false - && fieldType instanceof TextFieldMapper.TextFieldType == false) { + if (fieldType instanceof StringFieldType == false) { return false; } // and must be indexed diff --git 
a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java index e5adbde71eb..034e7daaf7f 100644 --- a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java @@ -109,12 +109,62 @@ public class TermVectorsServiceTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); assertThat(shard, notNullValue()); - TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(1, response.getFields().size()); Terms terms = response.getFields().terms("text"); TermsEnum iterator = terms.iterator(); while (iterator.next() != null) { assertEquals(max, iterator.docFreq()); } - } + } + + public void testWithIndexedPhrases() throws IOException { + XContentBuilder mapping = jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("index_phrases", true) + .field("term_vector", "with_positions_offsets_payloads") + .endObject() + .endObject() + .endObject() + .endObject(); + Settings settings = Settings.builder() + .put("number_of_shards", 1) + .build(); + createIndex("test", settings, "_doc", mapping); + ensureGreen(); + + int max = between(3, 10); + BulkRequestBuilder bulk = client().prepareBulk(); + for (int i = 0; i < max; i++) { + bulk.add(client().prepareIndex("test", "_doc", Integer.toString(i)) + .setSource("text", "the quick brown fox jumped over the lazy dog")); + } + bulk.get(); + + TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true); + + IndicesService indicesService = 
getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService(resolveIndex("test")); + IndexShard shard = test.getShardOrNull(0); + assertThat(shard, notNullValue()); + TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(2, response.getFields().size()); + + Terms terms = response.getFields().terms("text"); + TermsEnum iterator = terms.iterator(); + while (iterator.next() != null) { + assertEquals(max, iterator.docFreq()); + } + + Terms phrases = response.getFields().terms("text._index_phrase"); + TermsEnum phraseIterator = phrases.iterator(); + while (phraseIterator.next() != null) { + assertEquals(max, phraseIterator.docFreq()); + } + } } From ed3b44fb4cd6f957facf6d0f38daa08eeecd52ea Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 17 Jul 2018 14:14:02 +0200 Subject: [PATCH 060/260] Handle TokenizerFactory TODOs (#32063) * Don't replace Replace TokenizerFactory with Supplier, this approach was rejected in #32063 * Remove unused parameter from constructor --- .../analysis/common/CharGroupTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/ClassicTokenizerFactory.java | 2 +- .../analysis/common/EdgeNGramTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/KeywordTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/LetterTokenizerFactory.java | 2 +- .../analysis/common/LowerCaseTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/NGramTokenizerFactory.java | 2 +- .../analysis/common/PathHierarchyTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/PatternTokenizerFactory.java | 2 +- .../analysis/common/SimplePatternSplitTokenizerFactory.java | 2 +- .../analysis/common/SimplePatternTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/ThaiTokenizerFactory.java | 2 +- .../analysis/common/UAX29URLEmailTokenizerFactory.java | 2 +- .../analysis/common/WhitespaceTokenizerFactory.java | 2 +- 
.../org/elasticsearch/index/analysis/IcuTokenizerFactory.java | 2 +- .../elasticsearch/index/analysis/KuromojiTokenizerFactory.java | 2 +- .../org/elasticsearch/index/analysis/NoriTokenizerFactory.java | 2 +- .../index/analysis/SmartChineseTokenizerTokenizerFactory.java | 2 +- .../elasticsearch/index/analysis/AbstractTokenizerFactory.java | 3 +-- .../elasticsearch/index/analysis/StandardTokenizerFactory.java | 2 +- .../org/elasticsearch/index/analysis/TokenizerFactory.java | 2 +- .../xpack/ml/job/categorization/MlClassicTokenizerFactory.java | 2 +- 22 files changed, 22 insertions(+), 23 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java index d4e1e794a30..4920b7daae8 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java @@ -39,7 +39,7 @@ public class CharGroupTokenizerFactory extends AbstractTokenizerFactory{ private boolean tokenizeOnSymbol = false; public CharGroupTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); for (final String c : settings.getAsList("tokenize_on_chars")) { if (c == null || c.length() == 0) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java index e81f6b88d24..27316f4cde5 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java @@ -35,7 +35,7 @@ 
public class ClassicTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; ClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java index 55a527cc792..9bb17abf0cd 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java @@ -36,7 +36,7 @@ public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory { private final CharMatcher matcher; EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); this.matcher = parseTokenChars(settings.getAsList("token_chars")); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java index abe88462cb9..e4bf2c8c4ad 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java @@ -31,7 +31,7 @@ public class KeywordTokenizerFactory extends AbstractTokenizerFactory { private final int 
bufferSize; KeywordTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); bufferSize = settings.getAsInt("buffer_size", 256); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java index be98eb73a9c..cba30cb63c3 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class LetterTokenizerFactory extends AbstractTokenizerFactory { LetterTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java index 8f0c5f759aa..8c913a33cfe 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java @@ -30,7 +30,7 @@ import org.elasticsearch.index.analysis.MultiTermAwareComponent; public class LowerCaseTokenizerFactory extends AbstractTokenizerFactory implements MultiTermAwareComponent { LowerCaseTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java index b67f67cb2fa..b00797428b7 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java @@ -85,7 +85,7 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory { } NGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); int maxAllowedNgramDiff = indexSettings.getMaxNgramDiff(); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java index c877fe6944e..5b966c1c3b8 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java @@ -37,7 +37,7 @@ public class PathHierarchyTokenizerFactory extends AbstractTokenizerFactory { private final boolean reverse; PathHierarchyTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); bufferSize = settings.getAsInt("buffer_size", 1024); String delimiter = settings.get("delimiter"); if (delimiter == null) { diff --git 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java index f850b68ac98..11ba7e44db0 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java @@ -35,7 +35,7 @@ public class PatternTokenizerFactory extends AbstractTokenizerFactory { private final int group; PatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/); if (sPattern == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java index f861ec3792f..0faf4078295 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java @@ -31,7 +31,7 @@ public class SimplePatternSplitTokenizerFactory extends AbstractTokenizerFactory private final String pattern; public SimplePatternSplitTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); pattern = settings.get("pattern", ""); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java index 
6db3cfa67a3..67aee333d0f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java @@ -31,7 +31,7 @@ public class SimplePatternTokenizerFactory extends AbstractTokenizerFactory { private final String pattern; public SimplePatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); pattern = settings.get("pattern", ""); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java index b76aca42d36..861ade079a0 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java @@ -32,7 +32,7 @@ import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class ThaiTokenizerFactory extends AbstractTokenizerFactory { ThaiTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java index 8040c88ea7f..cd02eec24b4 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java @@ -32,7 +32,7 @@ public class UAX29URLEmailTokenizerFactory extends 
AbstractTokenizerFactory { private final int maxTokenLength; UAX29URLEmailTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java index 1f89d468813..7ce6a361cba 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java @@ -34,7 +34,7 @@ public class WhitespaceTokenizerFactory extends AbstractTokenizerFactory { private Integer maxTokenLength; WhitespaceTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt(MAX_TOKEN_LENGTH, StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index 84c611c0f81..3f8b9296aa0 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -47,7 +47,7 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { private static final String RULE_FILES = "rule_files"; public IcuTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + 
super(indexSettings, settings); config = getIcuConfig(environment, settings); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java index 2f00e68a75e..e9268f73065 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java @@ -45,7 +45,7 @@ public class KuromojiTokenizerFactory extends AbstractTokenizerFactory { private boolean discartPunctuation; public KuromojiTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); mode = getMode(settings); userDictionary = getUserDictionary(env, settings); discartPunctuation = settings.getAsBoolean("discard_punctuation", true); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java index 346cc84e5e6..9295ed95c3f 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java @@ -38,7 +38,7 @@ public class NoriTokenizerFactory extends AbstractTokenizerFactory { private final KoreanTokenizer.DecompoundMode decompoundMode; public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); decompoundMode = getMode(settings); userDictionary = getUserDictionary(env, settings); } diff --git 
a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java index 9d387296152..560bce9db27 100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.IndexSettings; public class SmartChineseTokenizerTokenizerFactory extends AbstractTokenizerFactory { public SmartChineseTokenizerTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java index bf6b2fd7c5b..4df0375f31c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java @@ -27,8 +27,7 @@ import org.elasticsearch.index.IndexSettings; public abstract class AbstractTokenizerFactory extends AbstractIndexComponent implements TokenizerFactory { protected final Version version; - // TODO drop `String ignored` in a followup - public AbstractTokenizerFactory(IndexSettings indexSettings, String ignored, Settings settings) { + public AbstractTokenizerFactory(IndexSettings indexSettings, Settings settings) { super(indexSettings); this.version = Analysis.parseAnalysisVersion(this.indexSettings.getSettings(), settings, logger); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java 
b/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java index ed8d2b452c2..2e4473f3b0e 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java @@ -31,7 +31,7 @@ public class StandardTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; public StandardTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java index be96dbd6560..4abed5a62ce 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java @@ -21,6 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; -public interface TokenizerFactory { // TODO replace with Supplier +public interface TokenizerFactory { Tokenizer create(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java index 40fee1f40f1..95cba4f2dcc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class MlClassicTokenizerFactory extends AbstractTokenizerFactory { public 
MlClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override From ef81c1df57587d6e44dd750ad36926a5c8bce894 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 17 Jul 2018 09:41:34 -0400 Subject: [PATCH 061/260] Ensure to release translog snapshot in primary-replica resync (#32045) Previously we create a translog snapshot inside the resync method, and that snapshot will be closed by the resync listener. However, if the resync method throws an exception before the resync listener is initialized, the translog snapshot won't be released. Closes #32030 --- .../index/shard/PrimaryReplicaSyncer.java | 71 ++++++++++--------- .../shard/PrimaryReplicaSyncerTests.java | 28 +++++--- .../index/translog/TranslogTests.java | 18 +++++ 3 files changed, 75 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index b39ebd51f2b..e66d78f2e1a 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.tasks.Task; @@ -80,48 +81,25 @@ public class PrimaryReplicaSyncer extends AbstractComponent { } public void resync(final IndexShard indexShard, final ActionListener listener) { - ActionListener resyncListener = null; + Translog.Snapshot snapshot = null; try { final long 
startingSeqNo = indexShard.getGlobalCheckpoint() + 1; - Translog.Snapshot snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); - resyncListener = new ActionListener() { - @Override - public void onResponse(final ResyncTask resyncTask) { - try { - snapshot.close(); - listener.onResponse(resyncTask); - } catch (final Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(final Exception e) { - try { - snapshot.close(); - } catch (final Exception inner) { - e.addSuppressed(inner); - } finally { - listener.onFailure(e); - } - } - }; - ShardId shardId = indexShard.shardId(); - + final ShardId shardId = indexShard.shardId(); // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender. // Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down - Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { - + snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); + final Translog.Snapshot originalSnapshot = snapshot; + final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override public synchronized void close() throws IOException { - snapshot.close(); + originalSnapshot.close(); } @Override public synchronized int totalOperations() { - return snapshot.totalOperations(); + return originalSnapshot.totalOperations(); } @Override @@ -132,15 +110,40 @@ public class PrimaryReplicaSyncer extends AbstractComponent { } else { assert state == IndexShardState.STARTED : "resync should only happen on a started shard, but state was: " + state; } - return snapshot.next(); + return originalSnapshot.next(); } }; + final ActionListener resyncListener = new ActionListener() { + @Override + public void onResponse(final ResyncTask resyncTask) { + try { + wrappedSnapshot.close(); + 
listener.onResponse(resyncTask); + } catch (final Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + try { + wrappedSnapshot.close(); + } catch (final Exception inner) { + e.addSuppressed(inner); + } finally { + listener.onFailure(e); + } + } + }; + resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot, startingSeqNo, maxSeqNo, resyncListener); } catch (Exception e) { - if (resyncListener != null) { - resyncListener.onFailure(e); - } else { + try { + IOUtils.close(snapshot); + } catch (IOException inner) { + e.addSuppressed(inner); + } finally { listener.onFailure(e); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index b290f4d4559..4444f475329 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -123,12 +123,10 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { public void testSyncerOnClosingShard() throws Exception { IndexShard shard = newStartedShard(true); AtomicBoolean syncActionCalled = new AtomicBoolean(); - CountDownLatch syncCalledLatch = new CountDownLatch(1); PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { logger.info("Sending off {} operations", request.getOperations().length); syncActionCalled.set(true); - syncCalledLatch.countDown(); threadPool.generic().execute(() -> listener.onResponse(new ResyncReplicationResponse())); }; PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, @@ -147,13 +145,27 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), new 
IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet()); - PlainActionFuture fut = new PlainActionFuture<>(); - threadPool.generic().execute(() -> { - try { - syncer.resync(shard, fut); - } catch (AlreadyClosedException ace) { - fut.onFailure(ace); + CountDownLatch syncCalledLatch = new CountDownLatch(1); + PlainActionFuture fut = new PlainActionFuture() { + @Override + public void onFailure(Exception e) { + try { + super.onFailure(e); + } finally { + syncCalledLatch.countDown(); + } } + @Override + public void onResponse(PrimaryReplicaSyncer.ResyncTask result) { + try { + super.onResponse(result); + } finally { + syncCalledLatch.countDown(); + } + } + }; + threadPool.generic().execute(() -> { + syncer.resync(shard, fut); }); if (randomBoolean()) { syncCalledLatch.await(); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index cf6e7536846..dbbb38090bc 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2932,6 +2932,24 @@ public class TranslogTests extends ESTestCase { } } + /** Make sure that it's ok to close a translog snapshot multiple times */ + public void testCloseSnapshotTwice() throws Exception { + int numOps = between(0, 10); + for (int i = 0; i < numOps; i++) { + Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[]{1}); + translog.add(op); + if (randomBoolean()) { + translog.rollGeneration(); + } + } + for (int i = 0; i < 5; i++) { + Translog.Snapshot snapshot = translog.newSnapshot(); + assertThat(snapshot, SnapshotMatchers.size(numOps)); + snapshot.close(); + snapshot.close(); + } + } + static class SortedSnapshot implements Translog.Snapshot { private final Translog.Snapshot snapshot; private List operations = null; From 
99c2a82c04872fe09e98aa461e744718b91ac3f1 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 17 Jul 2018 15:01:12 +0100 Subject: [PATCH 062/260] [ML] Move analyzer dependencies out of categorization config (#32123) The ML config classes will shortly be moved to the X-Pack protocol library to allow the ML APIs to be moved to the high level REST client. Dependencies on server functionality should be removed from the config classes before this is done. This change is entirely about moving code between packages. It does not add or remove any functionality or tests. --- .../config/CategorizationAnalyzerConfig.java | 204 +---------------- .../xpack/core/ml/job/config/Job.java | 18 +- .../xpack/ml/job/JobManager.java | 20 +- .../CategorizationAnalyzer.java | 210 +++++++++++++++++- .../CategorizationAnalyzerTests.java | 153 ++++++++++++- .../CategorizationAnalyzerConfigTests.java | 158 ------------- 6 files changed, 382 insertions(+), 381 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java index 1c2808c70ff..fd0fde76e68 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java @@ -5,14 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.job.config; -import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -22,15 +16,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.CustomAnalyzer; -import org.elasticsearch.index.analysis.CustomAnalyzerProvider; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; import org.elasticsearch.xpack.core.ml.MlParserType; @@ -42,12 +27,11 @@ import java.util.List; import java.util.Map; import java.util.Objects; - /** * Configuration for the categorization analyzer. * * The syntax is a subset of what can be supplied to the {@linkplain RestAnalyzeAction _analyze endpoint}. - * To summarise, the first option is to specify the name of an out-of-the-box analyzer: + * To summarize, the first option is to specify the name of an out-of-the-box analyzer: * * "categorization_analyzer" : "standard" * @@ -66,11 +50,6 @@ import java.util.Objects; * { "type" : "pattern_replace", "pattern": "^[0-9].*" } * ] * - * - * Unfortunately there is no easy to to reuse a subset of the _analyze action implementation, so much - * of the code in this file is copied from {@link TransportAnalyzeAction}. Unfortunately the logic required here is - * not quite identical to that of {@link TransportAnalyzeAction}, and the required code is hard to partially reuse. - * TODO: consider refactoring ES core to allow more reuse. 
*/ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeable { @@ -350,175 +329,6 @@ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeab return builder; } - /** - * Convert the config to an {@link Analyzer}. This may be a global analyzer or a newly created custom analyzer. - * In the case of a global analyzer the caller must NOT close it when they have finished with it. In the case of - * a newly created custom analyzer the caller is responsible for closing it. - * @return The first tuple member is the {@link Analyzer}; the second indicates whether the caller is responsible - * for closing it. - */ - public Tuple toAnalyzer(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - if (analyzer != null) { - Analyzer globalAnalyzer = analysisRegistry.getAnalyzer(analyzer); - if (globalAnalyzer == null) { - throw new IllegalArgumentException("Failed to find global analyzer [" + analyzer + "]"); - } - return new Tuple<>(globalAnalyzer, Boolean.FALSE); - } else { - List charFilterFactoryList = - parseCharFilterFactories(analysisRegistry, environment); - - Tuple tokenizerFactory = parseTokenizerFactory(analysisRegistry, - environment); - - List tokenFilterFactoryList = parseTokenFilterFactories(analysisRegistry, - environment, tokenizerFactory, charFilterFactoryList); - - return new Tuple<>(new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), - charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), - tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])), Boolean.TRUE); - } - } - - - /** - * Get char filter factories for each configured char filter. Each configuration - * element can be the name of an out-of-the-box char filter, or a custom definition. 
- */ - private List parseCharFilterFactories(AnalysisRegistry analysisRegistry, - Environment environment) throws IOException { - final List charFilterFactoryList = new ArrayList<>(); - for (NameOrDefinition charFilter : charFilters) { - final CharFilterFactory charFilterFactory; - if (charFilter.name != null) { - AnalysisModule.AnalysisProvider charFilterFactoryFactory = - analysisRegistry.getCharFilterProvider(charFilter.name); - if (charFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global char filter under [" + charFilter.name + "]"); - } - charFilterFactory = charFilterFactoryFactory.get(environment, charFilter.name); - } else { - String charFilterTypeName = charFilter.definition.get("type"); - if (charFilterTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for char filter: " + charFilter.definition); - } - AnalysisModule.AnalysisProvider charFilterFactoryFactory = - analysisRegistry.getCharFilterProvider(charFilterTypeName); - if (charFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global char filter under [" + charFilterTypeName + "]"); - } - Settings settings = augmentSettings(charFilter.definition); - // Need to set anonymous "name" of char_filter - charFilterFactory = charFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, - "_anonymous_charfilter", settings); - } - if (charFilterFactory == null) { - throw new IllegalArgumentException("Failed to find char filter [" + charFilter + "]"); - } - charFilterFactoryList.add(charFilterFactory); - } - return charFilterFactoryList; - } - - /** - * Get the tokenizer factory for the configured tokenizer. The configuration - * can be the name of an out-of-the-box tokenizer, or a custom definition. 
- */ - private Tuple parseTokenizerFactory(AnalysisRegistry analysisRegistry, - Environment environment) throws IOException { - final String name; - final TokenizerFactory tokenizerFactory; - if (tokenizer.name != null) { - name = tokenizer.name; - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(name); - if (tokenizerFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global tokenizer under [" + name + "]"); - } - tokenizerFactory = tokenizerFactoryFactory.get(environment, name); - } else { - String tokenizerTypeName = tokenizer.definition.get("type"); - if (tokenizerTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for tokenizer: " + tokenizer.definition); - } - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = - analysisRegistry.getTokenizerProvider(tokenizerTypeName); - if (tokenizerFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global tokenizer under [" + tokenizerTypeName + "]"); - } - Settings settings = augmentSettings(tokenizer.definition); - // Need to set anonymous "name" of tokenizer - name = "_anonymous_tokenizer"; - tokenizerFactory = tokenizerFactoryFactory.get(buildDummyIndexSettings(settings), environment, name, settings); - } - return new Tuple<>(name, tokenizerFactory); - } - - /** - * Get token filter factories for each configured token filter. Each configuration - * element can be the name of an out-of-the-box token filter, or a custom definition. 
- */ - private List parseTokenFilterFactories(AnalysisRegistry analysisRegistry, Environment environment, - Tuple tokenizerFactory, - List charFilterFactoryList) throws IOException { - final List tokenFilterFactoryList = new ArrayList<>(); - for (NameOrDefinition tokenFilter : tokenFilters) { - TokenFilterFactory tokenFilterFactory; - if (tokenFilter.name != null) { - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; - tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); - if (tokenFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global token filter under [" + tokenFilter.name + "]"); - } - tokenFilterFactory = tokenFilterFactoryFactory.get(environment, tokenFilter.name); - } else { - String filterTypeName = tokenFilter.definition.get("type"); - if (filterTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for token filter: " + tokenFilter.definition); - } - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory = - analysisRegistry.getTokenFilterProvider(filterTypeName); - if (tokenFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global token filter under [" + filterTypeName + "]"); - } - Settings settings = augmentSettings(tokenFilter.definition); - // Need to set anonymous "name" of token_filter - tokenFilterFactory = tokenFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, - "_anonymous_tokenfilter", settings); - tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), - tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); - } - if (tokenFilterFactory == null) { - throw new IllegalArgumentException("Failed to find or create token filter [" + tokenFilter + "]"); - } - tokenFilterFactoryList.add(tokenFilterFactory); - } - return tokenFilterFactoryList; - } - - /** - * The Elasticsearch analysis functionality is 
designed to work with indices. For - * categorization we have to pretend we've got some index settings. - */ - private IndexSettings buildDummyIndexSettings(Settings settings) { - IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); - return new IndexSettings(metaData, Settings.EMPTY); - } - - /** - * The behaviour of Elasticsearch analyzers can vary between versions. - * For categorization we'll always use the latest version of the text analysis. - * The other settings are just to stop classes that expect to be associated with - * an index from complaining. - */ - private Settings augmentSettings(Settings settings) { - return Settings.builder().put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -609,17 +419,5 @@ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeab } return new CategorizationAnalyzerConfig(analyzer, charFilters, tokenizer, tokenFilters); } - - /** - * Verify that the builder will build a valid config. This is not done as part of the basic build - * because it verifies that the names of analyzers/tokenizers/filters referenced by the config are - * known, and the validity of these names could change over time. 
- */ - public void verify(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - Tuple tuple = build().toAnalyzer(analysisRegistry, environment); - if (tuple.v2()) { - tuple.v1().close(); - } - } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index c8290521f98..560bac895fa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -21,8 +21,6 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; @@ -809,8 +807,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO return this; } - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; + public AnalysisConfig getAnalysisConfig() { + return analysisConfig; } public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { @@ -1135,18 +1133,6 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB); } - /** - * Validate the char filter/tokenizer/token filter names used in the categorization analyzer config (if any). - * The overall structure can be validated at parse time, but the exact names need to be checked separately, - * as plugins that provide the functionality can be installed/uninstalled. 
- */ - public void validateCategorizationAnalyzer(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - CategorizationAnalyzerConfig categorizationAnalyzerConfig = analysisConfig.getCategorizationAnalyzerConfig(); - if (categorizationAnalyzerConfig != null) { - new CategorizationAnalyzerConfig.Builder(categorizationAnalyzerConfig).verify(analysisRegistry, environment); - } - } - private void validateGroups() { for (String group : this.groups) { if (MlStrings.isValidId(group) == false) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index c3d31ae10e9..f7fab029c88 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; +import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeSta import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import 
org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; @@ -170,6 +172,22 @@ public class JobManager extends AbstractComponent { return MlMetadata.getJobState(jobId, tasks); } + /** + * Validate the char filter/tokenizer/token filter names used in the categorization analyzer config (if any). + * This validation has to be done server-side; it cannot be done in a client as that won't have loaded the + * appropriate analysis modules/plugins. + * The overall structure can be validated at parse time, but the exact names need to be checked separately, + * as plugins that provide the functionality can be installed/uninstalled. + */ + static void validateCategorizationAnalyzer(Job.Builder jobBuilder, AnalysisRegistry analysisRegistry, Environment environment) + throws IOException { + CategorizationAnalyzerConfig categorizationAnalyzerConfig = jobBuilder.getAnalysisConfig().getCategorizationAnalyzerConfig(); + if (categorizationAnalyzerConfig != null) { + CategorizationAnalyzer.verifyConfigBuilder(new CategorizationAnalyzerConfig.Builder(categorizationAnalyzerConfig), + analysisRegistry, environment); + } + } + /** * Stores a job in the cluster state */ @@ -177,7 +195,7 @@ public class JobManager extends AbstractComponent { ActionListener actionListener) throws IOException { request.getJobBuilder().validateAnalysisLimitsAndSetDefaults(maxModelMemoryLimit); - request.getJobBuilder().validateCategorizationAnalyzer(analysisRegistry, environment); + validateCategorizationAnalyzer(request.getJobBuilder(), analysisRegistry, environment); Job job = request.getJobBuilder().build(new Date()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java index a0101b999d5..6111fa139f9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java @@ -9,9 +9,21 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.CustomAnalyzerProvider; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import java.io.Closeable; @@ -19,11 +31,16 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; - /** * The categorization analyzer. * * Converts messages to lists of tokens that will be fed to the ML categorization algorithm. + * + * The code in {@link #makeAnalyzer} and the methods it calls is largely copied from {@link TransportAnalyzeAction}. + * Unfortunately there is no easy way to reuse a subset of the _analyze action implementation, as the + * logic required here is not quite identical to that of {@link TransportAnalyzeAction}, and the required code is + * hard to partially reuse. + * TODO: consider refactoring ES core to allow more reuse. 
*/ public class CategorizationAnalyzer implements Closeable { @@ -33,7 +50,7 @@ public class CategorizationAnalyzer implements Closeable { public CategorizationAnalyzer(AnalysisRegistry analysisRegistry, Environment environment, CategorizationAnalyzerConfig categorizationAnalyzerConfig) throws IOException { - Tuple tuple = categorizationAnalyzerConfig.toAnalyzer(analysisRegistry, environment); + Tuple tuple = makeAnalyzer(categorizationAnalyzerConfig, analysisRegistry, environment); analyzer = tuple.v1(); closeAnalyzer = tuple.v2(); } @@ -69,4 +86,193 @@ public class CategorizationAnalyzer implements Closeable { } return tokens; } + + /** + * Verify that the config builder will build a valid config. This is not done as part of the basic build + * because it verifies that the names of analyzers/tokenizers/filters referenced by the config are + * known, and the validity of these names could change over time. Additionally, it has to be done + * server-side rather than client-side, as the client will not have loaded the appropriate analysis + * modules/plugins. + */ + public static void verifyConfigBuilder(CategorizationAnalyzerConfig.Builder configBuilder, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + Tuple tuple = makeAnalyzer(configBuilder.build(), analysisRegistry, environment); + if (tuple.v2()) { + tuple.v1().close(); + } + } + + /** + * Convert a config to an {@link Analyzer}. This may be a global analyzer or a newly created custom analyzer. + * In the case of a global analyzer the caller must NOT close it when they have finished with it. In the case of + * a newly created custom analyzer the caller is responsible for closing it. + * @return The first tuple member is the {@link Analyzer}; the second indicates whether the caller is responsible + * for closing it. 
+ */ + private static Tuple makeAnalyzer(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + String analyzer = config.getAnalyzer(); + if (analyzer != null) { + Analyzer globalAnalyzer = analysisRegistry.getAnalyzer(analyzer); + if (globalAnalyzer == null) { + throw new IllegalArgumentException("Failed to find global analyzer [" + analyzer + "]"); + } + return new Tuple<>(globalAnalyzer, Boolean.FALSE); + } else { + List charFilterFactoryList = parseCharFilterFactories(config, analysisRegistry, environment); + + Tuple tokenizerFactory = parseTokenizerFactory(config, analysisRegistry, environment); + + List tokenFilterFactoryList = parseTokenFilterFactories(config, analysisRegistry, environment, + tokenizerFactory, charFilterFactoryList); + + return new Tuple<>(new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), + charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), + tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])), Boolean.TRUE); + } + } + + + /** + * Get char filter factories for each configured char filter. Each configuration + * element can be the name of an out-of-the-box char filter, or a custom definition. 
+ */ + private static List parseCharFilterFactories(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + List charFilters = config.getCharFilters(); + final List charFilterFactoryList = new ArrayList<>(); + for (CategorizationAnalyzerConfig.NameOrDefinition charFilter : charFilters) { + final CharFilterFactory charFilterFactory; + if (charFilter.name != null) { + AnalysisModule.AnalysisProvider charFilterFactoryFactory = + analysisRegistry.getCharFilterProvider(charFilter.name); + if (charFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global char filter under [" + charFilter.name + "]"); + } + charFilterFactory = charFilterFactoryFactory.get(environment, charFilter.name); + } else { + String charFilterTypeName = charFilter.definition.get("type"); + if (charFilterTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for char filter: " + charFilter.definition); + } + AnalysisModule.AnalysisProvider charFilterFactoryFactory = + analysisRegistry.getCharFilterProvider(charFilterTypeName); + if (charFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global char filter under [" + charFilterTypeName + "]"); + } + Settings settings = augmentSettings(charFilter.definition); + // Need to set anonymous "name" of char_filter + charFilterFactory = charFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, "_anonymous_charfilter", + settings); + } + if (charFilterFactory == null) { + throw new IllegalArgumentException("Failed to find char filter [" + charFilter + "]"); + } + charFilterFactoryList.add(charFilterFactory); + } + return charFilterFactoryList; + } + + /** + * Get the tokenizer factory for the configured tokenizer. The configuration + * can be the name of an out-of-the-box tokenizer, or a custom definition. 
+ */ + private static Tuple parseTokenizerFactory(CategorizationAnalyzerConfig config, + AnalysisRegistry analysisRegistry, Environment environment) + throws IOException { + CategorizationAnalyzerConfig.NameOrDefinition tokenizer = config.getTokenizer(); + final String name; + final TokenizerFactory tokenizerFactory; + if (tokenizer.name != null) { + name = tokenizer.name; + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(name); + if (tokenizerFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global tokenizer under [" + name + "]"); + } + tokenizerFactory = tokenizerFactoryFactory.get(environment, name); + } else { + String tokenizerTypeName = tokenizer.definition.get("type"); + if (tokenizerTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for tokenizer: " + tokenizer.definition); + } + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = + analysisRegistry.getTokenizerProvider(tokenizerTypeName); + if (tokenizerFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global tokenizer under [" + tokenizerTypeName + "]"); + } + Settings settings = augmentSettings(tokenizer.definition); + // Need to set anonymous "name" of tokenizer + name = "_anonymous_tokenizer"; + tokenizerFactory = tokenizerFactoryFactory.get(buildDummyIndexSettings(settings), environment, name, settings); + } + return new Tuple<>(name, tokenizerFactory); + } + + /** + * Get token filter factories for each configured token filter. Each configuration + * element can be the name of an out-of-the-box token filter, or a custom definition. 
+ */ + private static List parseTokenFilterFactories(CategorizationAnalyzerConfig config, + AnalysisRegistry analysisRegistry, Environment environment, + Tuple tokenizerFactory, + List charFilterFactoryList) throws IOException { + List tokenFilters = config.getTokenFilters(); + final List tokenFilterFactoryList = new ArrayList<>(); + for (CategorizationAnalyzerConfig.NameOrDefinition tokenFilter : tokenFilters) { + TokenFilterFactory tokenFilterFactory; + if (tokenFilter.name != null) { + AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; + tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); + if (tokenFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global token filter under [" + tokenFilter.name + "]"); + } + tokenFilterFactory = tokenFilterFactoryFactory.get(environment, tokenFilter.name); + } else { + String filterTypeName = tokenFilter.definition.get("type"); + if (filterTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for token filter: " + tokenFilter.definition); + } + AnalysisModule.AnalysisProvider tokenFilterFactoryFactory = + analysisRegistry.getTokenFilterProvider(filterTypeName); + if (tokenFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global token filter under [" + filterTypeName + "]"); + } + Settings settings = augmentSettings(tokenFilter.definition); + // Need to set anonymous "name" of token_filter + tokenFilterFactory = tokenFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, "_anonymous_tokenfilter", + settings); + tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), + tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); + } + if (tokenFilterFactory == null) { + throw new IllegalArgumentException("Failed to find or create token filter [" + tokenFilter + "]"); + } + 
tokenFilterFactoryList.add(tokenFilterFactory); + } + return tokenFilterFactoryList; + } + + /** + * The Elasticsearch analysis functionality is designed to work with indices. For + * categorization we have to pretend we've got some index settings. + */ + private static IndexSettings buildDummyIndexSettings(Settings settings) { + IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); + return new IndexSettings(metaData, Settings.EMPTY); + } + + /** + * The behaviour of Elasticsearch analyzers can vary between versions. + * For categorization we'll always use the latest version of the text analysis. + * The other settings are just to stop classes that expect to be associated with + * an index from complaining. + */ + private static Settings augmentSettings(Settings settings) { + return Settings.builder().put(settings) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java index 9f03952165c..59413f6a618 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java @@ -22,7 +22,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; - public class CategorizationAnalyzerTests extends ESTestCase { private AnalysisRegistry analysisRegistry; @@ -41,6 +40,158 @@ public class CategorizationAnalyzerTests extends ESTestCase { analysisRegistry = buildTestAnalysisRegistry(environment); } + public void 
testVerifyConfigBuilder_GivenNoConfig() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenDefault() throws IOException { + CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(defaultConfig); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenValidAnalyzer() throws IOException { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard"); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenInvalidAnalyzer() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("does not exist"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global analyzer [does not exist]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenValidCustomConfig() throws IOException { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); + ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); + ignoreStuffThatBeginsWithADigit.put("pattern", 
"^[0-9].*"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter(ignoreStuffThatBeginsWithADigit) + .addTokenFilter("snowball"); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidCharFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter("wrong!") + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global char filter under [wrong!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithMisconfiguredCharFilter() { + Map noPattern = new HashMap<>(); + noPattern.put("type", "pattern_replace"); + noPattern.put("attern", "should have been pattern"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(noPattern) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("pattern is missing for [_anonymous_charfilter] char filter of type 'pattern_replace'", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidTokenizer() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + CategorizationAnalyzerConfig.Builder builder = new 
CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("oops!") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global tokenizer under [oops!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenNoTokenizer() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); + ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); + ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .addTokenFilter("lowercase") + .addTokenFilter(ignoreStuffThatBeginsWithADigit) + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidTokenFilter() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("oh dear!"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> 
CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global token filter under [oh dear!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithMisconfiguredTokenFilter() { + Map noPattern = new HashMap<>(); + noPattern.put("type", "pattern_replace"); + noPattern.put("attern", "should have been pattern"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter("html_strip") + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter(noPattern); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("pattern is missing for [_anonymous_tokenfilter] token filter of type 'pattern_replace'", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndCharFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .addCharFilter("html_strip"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [char_filter] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndTokenizer() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .setTokenizer("classic"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [tokenizer] field", e.getMessage()); + } + + public void 
testVerifyConfigBuilder_GivenAnalyzerAndTokenFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .addTokenFilter("lowercase"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [filter] field", e.getMessage()); + } + // The default categorization analyzer matches what the analyzer in the ML C++ does public void testDefaultCategorizationAnalyzer() throws IOException { CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java index 9c725fe7629..2fe2c0b334c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java @@ -6,175 +6,17 @@ package org.elasticsearch.xpack.ml.job.config; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.MlParserType; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; -import org.junit.Before; import java.io.IOException; import 
java.util.HashMap; import java.util.Map; - public class CategorizationAnalyzerConfigTests extends AbstractSerializingTestCase { - private AnalysisRegistry analysisRegistry; - private Environment environment; - - @Before - public void setup() throws Exception { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - environment = TestEnvironment.newEnvironment(settings); - analysisRegistry = CategorizationAnalyzerTests.buildTestAnalysisRegistry(environment); - } - - public void testVerify_GivenNoConfig() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenDefault() throws IOException { - CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(defaultConfig); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenValidAnalyzer() throws IOException { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard"); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenInvalidAnalyzer() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("does not exist"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global analyzer [does not exist]", e.getMessage()); - } - - public void testVerify_GivenValidCustomConfig() throws IOException { - Map ignoreStuffInSqaureBrackets = 
new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); - ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); - ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter(ignoreStuffThatBeginsWithADigit) - .addTokenFilter("snowball"); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenCustomConfigWithInvalidCharFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter("wrong!") - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global char filter under [wrong!]", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithMisconfiguredCharFilter() { - Map noPattern = new HashMap<>(); - noPattern.put("type", "pattern_replace"); - noPattern.put("attern", "should have been pattern"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(noPattern) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("pattern is missing for [_anonymous_charfilter] char filter of type 'pattern_replace'", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithInvalidTokenizer() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", 
"pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("oops!") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global tokenizer under [oops!]", e.getMessage()); - } - - public void testVerify_GivenNoTokenizer() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); - ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); - ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .addTokenFilter("lowercase") - .addTokenFilter(ignoreStuffThatBeginsWithADigit) - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithInvalidTokenFilter() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("oh dear!"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> 
builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global token filter under [oh dear!]", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithMisconfiguredTokenFilter() { - Map noPattern = new HashMap<>(); - noPattern.put("type", "pattern_replace"); - noPattern.put("attern", "should have been pattern"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter("html_strip") - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter(noPattern); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("pattern is missing for [_anonymous_tokenfilter] token filter of type 'pattern_replace'", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndCharFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .addCharFilter("html_strip"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [char_filter] field", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndTokenizer() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .setTokenizer("classic"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndTokenFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .addTokenFilter("lowercase"); - IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [filter] field", e.getMessage()); - } - @Override protected CategorizationAnalyzerConfig createTestInstance() { return createRandomized().build(); From 2bd40a79276f1e73ebfdda8c0c28a26800b4e349 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 17 Jul 2018 16:21:53 +0100 Subject: [PATCH 063/260] [ML] Wait for aliases in multi-node tests (#32086) --- .../xpack/ml/integration/MlJobIT.java | 62 ++++++++++++------- .../integration/RestoreModelSnapshotIT.java | 12 ++-- 2 files changed, 48 insertions(+), 26 deletions(-) diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 7820cbc06f5..07529acdb88 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.integration; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -185,23 +186,32 @@ public class MlJobIT extends ESRestTestCase { + "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON)); assertEquals(200, response.getStatusLine().getStatusCode()); - response = client().performRequest("get", "_aliases"); - assertEquals(200, response.getStatusLine().getStatusCode()); - String responseAsString = responseEntityToString(response); + // With security enabled GET _aliases throws an 
index_not_found_exception + // if no aliases have been created. In multi-node tests the alias may not + // appear immediately so wait here. + assertBusy(() -> { + try { + Response aliasesResponse = client().performRequest("get", "_aliases"); + assertEquals(200, aliasesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(aliasesResponse); + assertThat(responseAsString, + containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}")); + } catch (ResponseException e) { + throw new AssertionError(e); + } + }); + Response indicesResponse = client().performRequest("get", "_cat/indices"); + assertEquals(200, indicesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(indicesResponse); assertThat(responseAsString, - containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) - + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}")); - assertThat(responseAsString, containsString("\"" + 
AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) - + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}")); - - response = client().performRequest("get", "_cat/indices"); - assertEquals(200, response.getStatusLine().getStatusCode()); - responseAsString = responseEntityToString(response); - assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); + containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)))); @@ -438,7 +448,6 @@ public class MlJobIT extends ESRestTestCase { client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034") public void testDeleteJobAfterMissingAliases() throws Exception { String jobId = "delete-job-after-missing-alias-job"; String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); @@ -446,15 +455,24 @@ public class MlJobIT extends ESRestTestCase { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - Response response = client().performRequest("get", "_cat/aliases"); - assertEquals(200, response.getStatusLine().getStatusCode()); - String responseAsString = responseEntityToString(response); - assertThat(responseAsString, containsString(readAliasName)); - assertThat(responseAsString, containsString(writeAliasName)); + // With security enabled cat aliases throws an index_not_found_exception + // if no aliases have been created. 
In multi-node tests the alias may not + // appear immediately so wait here. + assertBusy(() -> { + try { + Response aliasesResponse = client().performRequest(new Request("get", "_cat/aliases")); + assertEquals(200, aliasesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(aliasesResponse); + assertThat(responseAsString, containsString(readAliasName)); + assertThat(responseAsString, containsString(writeAliasName)); + } catch (ResponseException e) { + throw new AssertionError(e); + } + }); // Manually delete the aliases so that we can test that deletion proceeds // normally anyway - response = client().performRequest("delete", indexName + "/_alias/" + readAliasName); + Response response = client().performRequest("delete", indexName + "/_alias/" + readAliasName); assertEquals(200, response.getStatusLine().getStatusCode()); response = client().performRequest("delete", indexName + "/_alias/" + writeAliasName); assertEquals(200, response.getStatusLine().getStatusCode()); diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java index 9ff80bc739b..d7a2b857bf3 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java @@ -72,10 +72,14 @@ public class RestoreModelSnapshotIT extends MlNativeAutodetectIntegTestCase { openJob(job.getId()); String forecastId = forecast(job.getId(), TimeValue.timeValueHours(3), null); waitForecastToFinish(job.getId(), forecastId); - ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); - assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); - 
assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); - assertEquals(forecastStats.getRecordCount(), 3L); + // In a multi-node cluster the replica may not be up to date + // so wait for the change + assertBusy(() -> { + ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); + assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); + assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); + assertThat(forecastStats.getRecordCount(), equalTo(3L)); + }); closeJob(job.getId()); From efb4e97cfb5dc35da640d8e2b8d249b11db54c1e Mon Sep 17 00:00:00 2001 From: aptxx Date: Tue, 17 Jul 2018 23:42:24 +0800 Subject: [PATCH 064/260] Docs: Fix missing example script quote (#32010) --- .../client/documentation/ReindexDocumentationIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 0766560a849..93c785e754a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -96,7 +96,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase { updateByQuery.source("source_index") .script(new Script( ScriptType.INLINE, - "if (ctx._source.awesome == 'absolutely) {" + "if (ctx._source.awesome == 'absolutely') {" + " ctx.op='noop'" + "} else if (ctx._source.awesome == 'lame') {" + " ctx.op='delete'" From 0f7d2044c321d49c2e1ed71273d4f39fc7cfeb9a Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Tue, 17 Jul 2018 09:15:11 -0700 Subject: [PATCH 065/260] Re-disable packaging tests on suse boxes This reverts commit 14d7e2c7b2d3761a361edd720f98353f856936a4. 
--- .../gradle/vagrant/VagrantTestPlugin.groovy | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index d4d1d857e90..de3c0dfc328 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -526,7 +526,11 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(batsPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(batsPackagingTest) + // these tests are temporarily disabled for suse boxes while we debug an issue + // https://github.com/elastic/elasticsearch/issues/30295 + if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { + packagingTest.dependsOn(batsPackagingTest) + } } } @@ -565,7 +569,11 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(javaPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(javaPackagingTest) + // these tests are temporarily disabled for suse boxes while we debug an issue + // https://github.com/elastic/elasticsearch/issues/30295 + if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { + packagingTest.dependsOn(javaPackagingTest) + } } /* From 9cdbd1bd85e92181ee258b9c874be985fcc61de5 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 17 Jul 2018 20:25:38 +0300 Subject: [PATCH 066/260] Remove empty @param from Javadoc --- .../test/java/org/elasticsearch/test/SecuritySettingsSource.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 
7d329781fad..56d5fec3f20 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -298,7 +298,6 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas * Returns the SSL related configuration settings given the location of a key and certificate and the location * of the PEM certificates to be trusted * - * @param builder * @param keyPath The path to the Private key to be used for SSL * @param password The password with which the private key is protected * @param certificatePath The path to the PEM formatted Certificate encapsulating the public key that corresponds From 1c63eb108164af85a4006650f9c9753f64eb5622 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 17 Jul 2018 10:33:38 -0700 Subject: [PATCH 067/260] Painless: Fix Bug with Duplicate PainlessClasses (#32110) When building the PainlessMethods and PainlessFields they stored a reference to a PainlessClass. This reference was prior to "freezing" the PainlessClass so the data was both incomplete and mutable. This has been replaced with a target java class instead since the PainlessClass is accessible through a java class now and it requires no special modifications to get around a chicken and egg issue. 
--- .../java/org/elasticsearch/painless/Def.java | 4 +- .../elasticsearch/painless/FunctionRef.java | 8 ++-- .../painless/lookup/PainlessField.java | 6 +-- .../lookup/PainlessLookupBuilder.java | 14 +++---- .../lookup/PainlessLookupUtility.java | 6 +-- .../painless/lookup/PainlessMethod.java | 15 +++---- .../painless/node/EListInit.java | 3 +- .../elasticsearch/painless/node/EMapInit.java | 3 +- .../elasticsearch/painless/node/ENewObj.java | 3 +- .../painless/node/PSubField.java | 13 +++--- .../painless/PainlessDocGenerator.java | 42 ++++++++++--------- 11 files changed, 62 insertions(+), 55 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 78db712d183..fe11ff4814b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -334,8 +334,8 @@ public final class Def { } int arity = interfaceMethod.arguments.size(); PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); - return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, implMethod.owner.name, - implMethod.name, receiverClass); + return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, + PainlessLookupUtility.anyTypeToPainlessTypeName(implMethod.target), implMethod.name, receiverClass); } /** Returns a method handle to an implementation of clazz, given method reference signature. 
*/ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 9e72dc2c835..925359fabc5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -102,22 +102,22 @@ public class FunctionRef { interfaceMethodType = interfaceMethod.getMethodType().dropParameterTypes(0, 1); // the Painless$Script class can be inferred if owner is null - if (delegateMethod.owner == null) { + if (delegateMethod.target == null) { delegateClassName = CLASS_NAME; isDelegateInterface = false; } else if (delegateMethod.augmentation != null) { delegateClassName = delegateMethod.augmentation.getName(); isDelegateInterface = delegateMethod.augmentation.isInterface(); } else { - delegateClassName = delegateMethod.owner.clazz.getName(); - isDelegateInterface = delegateMethod.owner.clazz.isInterface(); + delegateClassName = delegateMethod.target.getName(); + isDelegateInterface = delegateMethod.target.isInterface(); } if ("".equals(delegateMethod.name)) { delegateInvokeType = H_NEWINVOKESPECIAL; } else if (Modifier.isStatic(delegateMethod.modifiers)) { delegateInvokeType = H_INVOKESTATIC; - } else if (delegateMethod.owner.clazz.isInterface()) { + } else if (delegateMethod.target.isInterface()) { delegateInvokeType = H_INVOKEINTERFACE; } else { delegateInvokeType = H_INVOKEVIRTUAL; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java index 7c85bd269b4..f316e1438ec 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -23,18 +23,18 @@ import java.lang.invoke.MethodHandle; public 
final class PainlessField { public final String name; - public final PainlessClass owner; + public final Class target; public final Class clazz; public final String javaName; public final int modifiers; public final MethodHandle getter; public final MethodHandle setter; - PainlessField(String name, String javaName, PainlessClass owner, Class clazz, int modifiers, + PainlessField(String name, String javaName, Class target, Class clazz, int modifiers, MethodHandle getter, MethodHandle setter) { this.name = name; this.javaName = javaName; - this.owner = owner; + this.target = target; this.clazz = clazz; this.modifiers = modifiers; this.getter = getter; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 9a5e08d65a7..5641eee1b5d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -310,7 +310,7 @@ public class PainlessLookupBuilder { } painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes), - key -> new PainlessMethod("", ownerStruct, null, void.class, painlessParametersTypes, + key -> new PainlessMethod("", ownerStruct.clazz, null, void.class, painlessParametersTypes, asmConstructor, javaConstructor.getModifiers(), javaHandle)); ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){ @@ -419,7 +419,7 @@ public class PainlessLookupBuilder { painlessMethod = methodCache.computeIfAbsent( buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass, + key -> new 
PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, null, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass && @@ -445,7 +445,7 @@ public class PainlessLookupBuilder { painlessMethod = methodCache.computeIfAbsent( buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass, + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.methods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) && @@ -501,7 +501,7 @@ public class PainlessLookupBuilder { painlessField = fieldCache.computeIfAbsent( buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null)); + ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null)); ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " + @@ -530,7 +530,7 @@ public class PainlessLookupBuilder { painlessField = fieldCache.computeIfAbsent( buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), key -> new PainlessField(whitelistField.javaFieldName, 
javaField.getName(), - ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); + ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); ownerStruct.members.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " + @@ -615,8 +615,8 @@ public class PainlessLookupBuilder { for (PainlessField field : child.members.values()) { if (owner.members.get(field.name) == null) { - owner.members.put(field.name, - new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter)); + owner.members.put(field.name, new PainlessField( + field.name, field.javaName, owner.clazz, field.clazz, field.modifiers, field.getter, field.setter)); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index d1f3ee4ece3..0f7c8fb915c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -158,7 +158,7 @@ public final class PainlessLookupUtility { painlessTypeName.charAt(arrayIndex++) == ']') { ++arrayDimensions; } else { - throw new IllegalArgumentException("invalid painless type [" + painlessTypeName + "]."); + throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); } } @@ -192,7 +192,7 @@ public final class PainlessLookupUtility { try { return Class.forName(javaDescriptor); } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("painless type [" + painlessTypeName + "] not found", cnfe); + throw new 
IllegalArgumentException("painless type [" + painlessTypeName + "] not found", cnfe); } } @@ -207,7 +207,7 @@ public final class PainlessLookupUtility { } if (javaClasses.contains(painlessType) == false) { - throw new IllegalStateException("painless type [" + painlessTypeName + "] not found"); + throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java index 8d8a7f691fe..2b0d44e7176 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless.lookup; import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodType; @@ -30,7 +31,7 @@ import java.util.List; public class PainlessMethod { public final String name; - public final PainlessClass owner; + public final Class target; public final Class augmentation; public final Class rtn; public final List> arguments; @@ -38,11 +39,11 @@ public class PainlessMethod { public final int modifiers; public final MethodHandle handle; - public PainlessMethod(String name, PainlessClass owner, Class augmentation, Class rtn, List> arguments, + public PainlessMethod(String name, Class target, Class augmentation, Class rtn, List> arguments, org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) { this.name = name; this.augmentation = augmentation; - this.owner = owner; + this.target = target; this.rtn = rtn; this.arguments = Collections.unmodifiableList(arguments); this.method = method; @@ -85,11 +86,11 @@ public class PainlessMethod { for (int i = 0; i < arguments.size(); i++) { 
params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } - returnValue = owner.clazz; + returnValue = target; } else { // virtual/interface method: add receiver class params = new Class[1 + arguments.size()]; - params[0] = owner.clazz; + params[0] = target; for (int i = 0; i < arguments.size(); i++) { params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } @@ -106,8 +107,8 @@ public class PainlessMethod { clazz = augmentation; type = org.objectweb.asm.Type.getType(augmentation); } else { - clazz = owner.clazz; - type = owner.type; + clazz = target; + type = Type.getType(target); } if (Modifier.isStatic(modifiers)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 90475419b32..7e923e5f90f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -26,6 +26,7 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; +import org.objectweb.asm.Type; import java.util.ArrayList; import java.util.List; @@ -90,7 +91,7 @@ public final class EListInit extends AExpression { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); for (AExpression value : values) { writer.dup(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index c6474846d4c..b350a758944 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -26,6 +26,7 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; +import org.objectweb.asm.Type; import java.util.HashMap; import java.util.List; @@ -109,7 +110,7 @@ public final class EMapInit extends AExpression { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); for (int index = 0; index < keys.size(); ++index) { AExpression key = keys.get(index); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index a780ea3e05b..cf6f040c975 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -26,6 +26,7 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.objectweb.asm.Type; import java.util.List; import java.util.Objects; @@ -104,7 +105,7 @@ public final class ENewObj extends AExpression { argument.write(writer, globals); } - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index 8eb154e745b..a1a0ee1dade 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -25,6 +25,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.objectweb.asm.Type; import java.lang.reflect.Modifier; import java.util.Objects; @@ -63,9 +64,9 @@ final class PSubField extends AStoreable { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.getField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } @@ -94,9 +95,9 @@ final class PSubField extends AStoreable { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.getField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } @@ -105,9 +106,9 @@ final class PSubField extends AStoreable { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.putStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.putStatic(Type.getType(field.target), 
field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.putField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.putField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 5e8e6ad47d8..4486a52ccb1 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -27,6 +27,7 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.spi.Whitelist; @@ -67,8 +68,8 @@ public class PainlessDocGenerator { Path indexPath = apiRootPath.resolve("index.asciidoc"); logger.info("Starting to write [index.asciidoc]"); try (PrintStream indexStream = new PrintStream( - Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), + false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); List structs = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); for (PainlessClass struct : structs) { @@ -91,7 +92,7 @@ public class PainlessDocGenerator { false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(typeStream); typeStream.print("[["); - emitAnchor(typeStream, struct); + emitAnchor(typeStream, struct.clazz); 
typeStream.print("]]++"); typeStream.print(struct.name); typeStream.println("++::"); @@ -104,10 +105,11 @@ public class PainlessDocGenerator { struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); Map inherited = new TreeMap<>(); struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { - if (method.owner == struct) { + if (method.target == struct.clazz) { documentMethod(typeStream, method); } else { - inherited.put(method.owner.name, method.owner); + PainlessClass painlessClass = PAINLESS_LOOKUP.getPainlessStructFromJavaClass(method.target); + inherited.put(painlessClass.name, painlessClass); } }); @@ -206,16 +208,16 @@ public class PainlessDocGenerator { /** * Anchor text for a {@link PainlessClass}. */ - private static void emitAnchor(PrintStream stream, PainlessClass struct) { + private static void emitAnchor(PrintStream stream, Class clazz) { stream.print("painless-api-reference-"); - stream.print(struct.name.replace('.', '-')); + stream.print(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz).replace('.', '-')); } /** * Anchor text for a {@link PainlessMethod}. */ private static void emitAnchor(PrintStream stream, PainlessMethod method) { - emitAnchor(stream, method.owner); + emitAnchor(stream, method.target); stream.print('-'); stream.print(methodName(method)); stream.print('-'); @@ -226,18 +228,18 @@ public class PainlessDocGenerator { * Anchor text for a {@link PainlessField}. */ private static void emitAnchor(PrintStream stream, PainlessField field) { - emitAnchor(stream, field.owner); + emitAnchor(stream, field.target); stream.print('-'); stream.print(field.name); } private static String methodName(PainlessMethod method) { - return method.name.equals("") ? method.owner.name : method.name; + return method.name.equals("") ? PainlessLookupUtility.anyTypeToPainlessTypeName(method.target) : method.name; } /** * Emit a {@link Class}. 
If the type is primitive or an array of primitives this just emits the name of the type. Otherwise this emits - an internal link with the text. + an internal link with the text. */ private static void emitType(PrintStream stream, Class clazz) { emitStruct(stream, PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz)); @@ -253,7 +255,7 @@ public class PainlessDocGenerator { private static void emitStruct(PrintStream stream, PainlessClass struct) { if (false == struct.clazz.isPrimitive() && false == struct.name.equals("def")) { stream.print("<<"); - emitAnchor(stream, struct); + emitAnchor(stream, struct.clazz); stream.print(','); stream.print(struct.name); stream.print(">>"); @@ -271,14 +273,14 @@ public class PainlessDocGenerator { stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.owner.clazz)); + stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.target)); stream.print(".html#"); stream.print(methodName(method)); stream.print("%2D"); boolean first = true; if (method.augmentation != null) { first = false; - stream.print(method.owner.clazz.getName()); + stream.print(method.target.getName()); } for (Class clazz: method.arguments) { if (first) { @@ -303,7 +305,7 @@ public class PainlessDocGenerator { stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(field.owner.clazz)); + stream.print(classUrlPath(field.target)); stream.print(".html#"); stream.print(field.javaName); } @@ -315,21 +317,21 @@ public class PainlessDocGenerator { if (method.augmentation != null) { return "painless"; } - return javadocRoot(method.owner); + return javadocRoot(method.target); } /** * Pick the javadoc root for a {@link PainlessField}. 
*/ private static String javadocRoot(PainlessField field) { - return javadocRoot(field.owner); + return javadocRoot(field.target); } /** - * Pick the javadoc root for a {@link PainlessClass}. + * Pick the javadoc root for a {@link Class}. */ - private static String javadocRoot(PainlessClass struct) { - String classPackage = struct.clazz.getPackage().getName(); + private static String javadocRoot(Class clazz) { + String classPackage = clazz.getPackage().getName(); if (classPackage.startsWith("java")) { return "java8"; } From 1b97652a4cc1aa2acbe761464745abb35dd8faa0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 17 Jul 2018 14:20:41 -0400 Subject: [PATCH 068/260] Build: Move shadow customizations into common code (#32014) Moves the customizations to the build to produce nice shadow jars and javadocs into common build code, mostly BuildPlugin with a little into the root build.gradle file. This means that any project that applies the shadow plugin will automatically be set up just like the high level rest client: * The non-shadow jar will not be built * The shadow jar will not have a "classifier" * Tests will run against the shadow jar * Javadoc will include all of the shadowed classes * Service files in `META-INF/services` will be merged --- benchmarks/build.gradle | 15 --- build.gradle | 57 +++++++++-- buildSrc/build.gradle | 1 + .../elasticsearch/gradle/BuildPlugin.groovy | 92 ++++++++++++++++- .../gradle/plugin/PluginBuildPlugin.groovy | 14 +++ client/benchmark/build.gradle | 12 --- client/rest-high-level/build.gradle | 99 ------------------- x-pack/plugin/sql/jdbc/build.gradle | 36 ------- 8 files changed, 153 insertions(+), 173 deletions(-) diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index e7ee5a059ab..80d1982300d 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -17,17 +17,6 @@ * under the License. 
*/ -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' // order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336 @@ -81,10 +70,6 @@ thirdPartyAudit.excludes = [ 'org.openjdk.jmh.util.Utils' ] -shadowJar { - classifier = 'benchmarks' -} - runShadow { executable = new File(project.runtimeJavaHome, 'bin/java') } diff --git a/build.gradle b/build.gradle index 187e2477052..ccbb6898dc4 100644 --- a/build.gradle +++ b/build.gradle @@ -17,7 +17,7 @@ * under the License. */ - +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import org.apache.tools.ant.taskdefs.condition.Os import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.BuildPlugin @@ -303,18 +303,55 @@ subprojects { if (project.plugins.hasPlugin(BuildPlugin)) { String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? 
"https://snapshots.elastic.co" : "https://artifacts.elastic.co" Closure sortClosure = { a, b -> b.group <=> a.group } - Closure depJavadocClosure = { dep -> - if (dep.group != null && dep.group.startsWith('org.elasticsearch')) { - Project upstreamProject = dependencyToProject(dep) - if (upstreamProject != null) { - project.javadoc.dependsOn "${upstreamProject.path}:javadoc" - String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + dep.name.replaceAll('\\.', '/') + '/' + dep.version - project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/" + Closure depJavadocClosure = { shadowed, dep -> + if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) { + return + } + Project upstreamProject = dependencyToProject(dep) + if (upstreamProject == null) { + return + } + if (shadowed) { + /* + * Include the source of shadowed upstream projects so we don't + * have to publish their javadoc. + */ + project.evaluationDependsOn(upstreamProject.path) + project.javadoc.source += upstreamProject.javadoc.source + /* + * Do not add those projects to the javadoc classpath because + * we are going to resolve them with their source instead. + */ + project.javadoc.classpath = project.javadoc.classpath.filter { f -> + false == upstreamProject.configurations.archives.artifacts.files.files.contains(f) } + /* + * Instead we need the upstream project's javadoc classpath so + * we don't barf on the classes that it references. 
+ */ + project.javadoc.classpath += upstreamProject.javadoc.classpath + } else { + // Link to non-shadowed dependant projects + project.javadoc.dependsOn "${upstreamProject.path}:javadoc" + String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + dep.name.replaceAll('\\.', '/') + '/' + dep.version + project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/" } } - project.configurations.compile.dependencies.findAll().toSorted(sortClosure).each(depJavadocClosure) - project.configurations.compileOnly.dependencies.findAll().toSorted(sortClosure).each(depJavadocClosure) + boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin) + project.configurations.compile.dependencies + .findAll() + .toSorted(sortClosure) + .each({ c -> depJavadocClosure(hasShadow, c) }) + project.configurations.compileOnly.dependencies + .findAll() + .toSorted(sortClosure) + .each({ c -> depJavadocClosure(hasShadow, c) }) + if (hasShadow) { + project.configurations.shadow.dependencies + .findAll() + .toSorted(sortClosure) + .each({ c -> depJavadocClosure(false, c) }) + } } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 3d100daf7d6..eb95ff148f6 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -104,6 +104,7 @@ dependencies { compile 'de.thetaphi:forbiddenapis:2.5' compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.5.1" + compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4' testCompile "junit:junit:${props.getProperty('junit')}" } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 89e10c50ff7..b5b5ec95bec 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle import 
com.carrotsearch.gradle.junit4.RandomizedTestingTask +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import org.apache.tools.ant.taskdefs.condition.Os import org.eclipse.jgit.lib.Constants import org.eclipse.jgit.lib.RepositoryBuilder @@ -36,12 +37,14 @@ import org.gradle.api.artifacts.ModuleDependency import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact +import org.gradle.api.artifacts.SelfResolvingDependency import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.execution.TaskExecutionGraph import org.gradle.api.plugins.JavaPlugin import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.publish.maven.plugins.MavenPublishPlugin import org.gradle.api.publish.maven.tasks.GenerateMavenPom +import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.GroovyCompile import org.gradle.api.tasks.compile.JavaCompile @@ -498,7 +501,41 @@ class BuildPlugin implements Plugin { } } } + project.plugins.withType(ShadowPlugin).whenPluginAdded { + project.publishing { + publications { + nebula(MavenPublication) { + artifact project.tasks.shadowJar + artifactId = project.archivesBaseName + /* + * Configure the pom to include the "shadow" as compile dependencies + * because that is how we're using them but remove all other dependencies + * because they've been shaded into the jar. 
+ */ + pom.withXml { XmlProvider xml -> + Node root = xml.asNode() + root.remove(root.dependencies) + Node dependenciesNode = root.appendNode('dependencies') + project.configurations.shadow.allDependencies.each { + if (false == it instanceof SelfResolvingDependency) { + Node dependencyNode = dependenciesNode.appendNode('dependency') + dependencyNode.appendNode('groupId', it.group) + dependencyNode.appendNode('artifactId', it.name) + dependencyNode.appendNode('version', it.version) + dependencyNode.appendNode('scope', 'compile') + } + } + // Be tidy and remove the element if it is empty + if (dependenciesNode.children.empty) { + root.remove(dependenciesNode) + } + } + } + } + } + } } + } /** Adds compiler settings to the project */ @@ -660,6 +697,28 @@ class BuildPlugin implements Plugin { } } } + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * When we use the shadow plugin we entirely replace the + * normal jar with the shadow jar so we no longer want to run + * the jar task. + */ + project.tasks.jar.enabled = false + project.tasks.shadowJar { + /* + * Replace the default "shadow" classifier with null + * which will leave the classifier off of the file name. + */ + classifier = null + /* + * Not all cases need service files merged but it is + * better to be safe + */ + mergeServiceFiles() + } + // Make sure we assemble the shadow jar + project.tasks.assemble.dependsOn project.tasks.shadowJar + } } /** Returns a closure of common configuration shared by unit and integration tests. */ @@ -744,6 +803,18 @@ class BuildPlugin implements Plugin { } exclude '**/*$*.class' + + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * If we make a shaded jar we test against it. 
+ */ + classpath -= project.tasks.compileJava.outputs.files + classpath -= project.configurations.compile + classpath -= project.configurations.runtime + classpath += project.configurations.shadow + classpath += project.tasks.shadowJar.outputs.files + dependsOn project.tasks.shadowJar + } } } @@ -766,7 +837,26 @@ class BuildPlugin implements Plugin { additionalTest.dependsOn(project.tasks.testClasses) test.dependsOn(additionalTest) }); - return test + + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * We need somewhere to configure dependencies that we don't wish + * to shade into the jar. The shadow plugin creates a "shadow" + * configuration which is *almost* exactly that. It is never + * bundled into the shaded jar but is used for main source + * compilation. Unfortunately, by default it is not used for + * *test* source compilation and isn't used in tests at all. This + * change makes it available for test compilation. + * + * Note that this isn't going to work properly with qa projects + * but they have no business applying the shadow plugin in the + * firstplace. 
+ */ + SourceSet testSourceSet = project.sourceSets.findByName('test') + if (testSourceSet != null) { + testSourceSet.compileClasspath += project.configurations.shadow + } + } } private static configurePrecommit(Project project) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index eb4da8d1f31..d76084bf22e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -18,11 +18,13 @@ */ package org.elasticsearch.gradle.plugin +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import nebula.plugin.info.scm.ScmInfoPlugin import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask +import org.gradle.api.InvalidUserDataException import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task @@ -46,6 +48,18 @@ public class PluginBuildPlugin extends BuildPlugin { @Override public void apply(Project project) { super.apply(project) + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * We've not tested these plugins together and we're fairly sure + * they aren't going to work properly as is *and* we're not really + * sure *why* you'd want to shade stuff in plugins. So we throw an + * exception here to make you come and read this comment. If you + * have a need for shadow while building plugins then know that you + * are probably going to have to fight with gradle for a while.... 
+ */ + throw new InvalidUserDataException('elasticsearch.esplugin is not ' + + 'compatible with com.github.johnrengelman.shadow'); + } configureDependencies(project) // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle index 77867f5e273..0c3238d9853 100644 --- a/client/benchmark/build.gradle +++ b/client/benchmark/build.gradle @@ -17,18 +17,6 @@ * under the License. */ -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - - apply plugin: 'elasticsearch.build' // build an uberjar with all benchmarks apply plugin: 'com.github.johnrengelman.shadow' diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 2fed806e98c..a1260894bf7 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -21,17 +21,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.RestIntegTestTask import org.gradle.api.internal.provider.Providers -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.rest-test' apply plugin: 'nebula.maven-base-publish' @@ -45,49 +34,6 @@ archivesBaseName = 'elasticsearch-rest-high-level-client' Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE) test.dependsOn(copyRestSpec) -publishing { - publications { - nebula(MavenPublication) { - artifact shadowJar - artifactId = archivesBaseName - /* - * Configure the pom to include the "shadow" as compile dependencies - * because that is how we're using them but remove all other 
dependencies - * because they've been shaded into the jar. - */ - pom.withXml { XmlProvider xml -> - Node root = xml.asNode() - root.remove(root.dependencies) - Node dependenciesNode = root.appendNode('dependencies') - project.configurations.shadow.allDependencies.each { - if (false == it instanceof SelfResolvingDependency) { - Node dependencyNode = dependenciesNode.appendNode('dependency') - dependencyNode.appendNode('groupId', it.group) - dependencyNode.appendNode('artifactId', it.name) - dependencyNode.appendNode('version', it.version) - dependencyNode.appendNode('scope', 'compile') - } - } - } - } - } -} - -/* - * We need somewhere to configure dependencies that we don't wish to shade - * into the high level REST client. The shadow plugin creates a "shadow" - * configuration which is *almost* exactly that. It is never bundled into - * the shaded jar but is used for main source compilation. Unfortunately, - * by default it is not used for *test* source compilation and isn't used - * in tests at all. This change makes it available for test compilation. - * A change below makes it available for testing. - */ -sourceSets { - test { - compileClasspath += configurations.shadow - } -} - dependencies { /* * Everything in the "shadow" configuration is *not* copied into the @@ -124,48 +70,3 @@ forbiddenApisMain { signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] } - -shadowJar { - classifier = null - mergeServiceFiles() -} - -// We don't need normal jar, we use shadow jar instead -jar.enabled = false -assemble.dependsOn shadowJar - -javadoc { - /* - * Bundle all of the javadoc from all of the shaded projects into this one - * so we don't *have* to publish javadoc for all of the "client" jars. 
- */ - configurations.compile.dependencies.all { Dependency dep -> - Project p = dependencyToProject(dep) - if (p != null) { - evaluationDependsOn(p.path) - source += p.sourceSets.main.allJava - } - } -} - -/* - * Use the jar for testing so we have tests of the bundled jar. - * Use the "shadow" configuration for testing because we need things - * in it. - */ -test { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= configurations.runtime - classpath += configurations.shadow - classpath += shadowJar.outputs.files - dependsOn shadowJar -} -integTestRunner { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= configurations.runtime - classpath += configurations.shadow - classpath += shadowJar.outputs.files - dependsOn shadowJar -} diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 9d27c2030d6..a0d9b24c507 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -1,15 +1,3 @@ - -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -49,7 +37,6 @@ dependencyLicenses { } shadowJar { - classifier = null relocate 'com.fasterxml', 'org.elasticsearch.fasterxml' } @@ -70,26 +57,3 @@ artifacts { nodeps nodepsJar archives shadowJar } - -publishing { - publications { - nebula(MavenPublication) { - artifact shadowJar - pom.withXml { - // Nebula is mistakenly including all dependencies that are already shadowed into the shadow jar - asNode().remove(asNode().dependencies) - } - } - } -} - -assemble.dependsOn shadowJar - -// Use the jar for testing so the tests are more "real" -test { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= 
configurations.runtime - classpath += shadowJar.outputs.files - dependsOn shadowJar -} From d170ab33150ff96ed14cc47ea61e44136cc2dc1f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 17 Jul 2018 15:49:20 -0400 Subject: [PATCH 069/260] Disable C2 from using AVX-512 on JDK 10 (#32138) The C2 compiler in JDK 10 appears to have an issue compiling to AVX-512 instructions (on hardware that supports such). As a workaround, this commit adds a JVM flag on JDK 10+ to disable the use of AVX-512 instructions until a fix is introduced to the JDK. Instead, we use a flag to enable AVX and AVX2 only. Note: Based on my reading of the C2 code, this flag does not appear to have any impact on hardware that does not support AVX2. I have tested this manually on an Intel Atom C2538 processor that supports neither AVX nor AVX2. I have also tested this manually on an Intel i5-3317U processor that supports AVX but not AVX2. --- distribution/src/config/jvm.options | 3 +++ 1 file changed, 3 insertions(+) diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index c5c0f44caeb..e486735eb8f 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -100,3 +100,6 @@ ${error.file} # due to internationalization enhancements in JDK 9 Elasticsearch need to set the provider to COMPAT otherwise # time/date parsing will break in an incompatible way for some date patterns and locals 9-:-Djava.locale.providers=COMPAT + +# temporary workaround for C2 bug with JDK 10 on hardware with AVX-512 +10-:-XX:UseAVX=2 From 6371d51866e6b015f378fad568abbc0a7df94e2d Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 17 Jul 2018 13:14:46 -0700 Subject: [PATCH 070/260] Build: Make additional test deps of check (#32015) This commit moves additional unit test runners from being dependencies of the test task to dependencies of check. 
Without this change, reproduce lines are incorrect due to the additional test runner not matching any of the reproduce class/method info. closes #31964 --- .../elasticsearch/gradle/BuildPlugin.groovy | 2 +- server/build.gradle | 21 ++++++++++--------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index b5b5ec95bec..219d00ba640 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -835,7 +835,7 @@ class BuildPlugin implements Plugin { additionalTest.configure(commonTestConfig(project)) additionalTest.configure(config) additionalTest.dependsOn(project.tasks.testClasses) - test.dependsOn(additionalTest) + project.check.dependsOn(additionalTest) }); project.plugins.withType(ShadowPlugin).whenPluginAdded { diff --git a/server/build.gradle b/server/build.gradle index da60bca5a3e..7db073f43a5 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -156,6 +156,16 @@ if (isEclipse) { compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" +// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 +additionalTest('testScriptDocValuesMissingV6Behaviour'){ + include '**/ScriptDocValuesMissingV6BehaviourTests.class' + systemProperty 'es.scripting.exception_for_missing_value', 'false' +} +test { + // these are tested explicitly in separate test tasks + exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' +} + forbiddenPatterns { exclude '**/*.json' exclude '**/*.jmx' @@ -329,7 +339,7 @@ if (isEclipse == false || project.path == ":server-tests") { task integTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', - dependsOn: 
test.dependsOn.collect()) { + dependsOn: test.dependsOn) { configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs @@ -339,12 +349,3 @@ if (isEclipse == false || project.path == ":server-tests") { integTest.mustRunAfter test } -// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 -additionalTest('testScriptDocValuesMissingV6Behaviour'){ - include '**/ScriptDocValuesMissingV6BehaviourTests.class' - systemProperty 'es.scripting.exception_for_missing_value', 'false' -} -test { - // these are tested explicitly in separate test tasks - exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' -} From 03c16cd0e33a3c869c2a0347147771b895ba20e8 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 17 Jul 2018 13:54:49 -0700 Subject: [PATCH 071/260] Painless: Add PainlessClassBuilder (#32141) Several pieces of data in PainlessClass cannot be passed in at the time the PainlessClass is created so it must be "frozen" after all the data is collected. This means PainlessClass is currently serving two functions as both a builder and a set of data. This separates the two pieces into clearly distinct values. This change also removes the PainlessMethodKey in favor of a simple String. The goal is to have the painless method key be completely internal to the PainlessLookup eventually and this simplifies the way there. Note that this change was included here, rather than in a follow-up PR, since PainlessClass and PainlessClassBuilder were already being changed. 
--- .../java/org/elasticsearch/painless/Def.java | 3 +- .../elasticsearch/painless/FunctionRef.java | 8 +- .../org/elasticsearch/painless/Locals.java | 9 +-- .../painless/lookup/PainlessClass.java | 73 +++++------------ .../painless/lookup/PainlessClassBuilder.java | 70 +++++++++++++++++ .../lookup/PainlessLookupBuilder.java | 78 ++++++++++--------- .../painless/lookup/PainlessMethodKey.java | 75 ------------------ .../painless/node/ECallLocal.java | 4 +- .../painless/node/EFunctionRef.java | 4 +- .../painless/node/EListInit.java | 9 ++- .../elasticsearch/painless/node/EMapInit.java | 9 ++- .../elasticsearch/painless/node/ENewObj.java | 4 +- .../painless/node/PCallInvoke.java | 3 +- .../elasticsearch/painless/node/PField.java | 13 ++-- .../painless/node/PSubListShortcut.java | 6 +- .../painless/node/PSubMapShortcut.java | 6 +- .../elasticsearch/painless/node/SSource.java | 6 +- .../painless/node/SSubEachIterable.java | 5 +- .../painless/node/NodeToStringTests.java | 10 +-- 19 files changed, 178 insertions(+), 217 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index fe11ff4814b..f3388fc4bb2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.lang.invoke.CallSite; import 
java.lang.invoke.MethodHandle; @@ -185,7 +184,7 @@ public final class Def { * @throws IllegalArgumentException if no matching whitelisted method was found. */ static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class receiverClass, String name, int arity) { - PainlessMethodKey key = new PainlessMethodKey(name, arity); + String key = PainlessLookupUtility.buildPainlessMethodKey(name, arity); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 925359fabc5..d64e833912f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.lang.invoke.MethodType; @@ -177,10 +176,11 @@ public class FunctionRef { final PainlessMethod impl; // ctor ref if ("new".equals(call)) { - impl = struct.constructors.get(new PainlessMethodKey("", method.arguments.size())); + impl = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", method.arguments.size())); } else { // look for a static impl first - PainlessMethod staticImpl = struct.staticMethods.get(new PainlessMethodKey(call, method.arguments.size())); + PainlessMethod staticImpl = + struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); if (staticImpl == null) { 
// otherwise a virtual impl final int arity; @@ -191,7 +191,7 @@ public class FunctionRef { // receiver passed arity = method.arguments.size() - 1; } - impl = struct.methods.get(new PainlessMethodKey(call, arity)); + impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); } else { impl = staticImpl; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index e797740fed1..6c1010a3450 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Arrays; import java.util.Collection; @@ -144,7 +143,7 @@ public final class Locals { } /** Looks up a method. Returns null if the method does not exist. */ - public PainlessMethod getMethod(PainlessMethodKey key) { + public PainlessMethod getMethod(String key) { PainlessMethod method = lookupMethod(key); if (method != null) { return method; @@ -200,7 +199,7 @@ public final class Locals { // variable name -> variable private Map variables; // method name+arity -> methods - private Map methods; + private Map methods; /** * Create a new Locals @@ -238,7 +237,7 @@ public final class Locals { } /** Looks up a method at this scope only. Returns null if the method does not exist. 
*/ - private PainlessMethod lookupMethod(PainlessMethodKey key) { + private PainlessMethod lookupMethod(String key) { if (methods == null) { return null; } @@ -261,7 +260,7 @@ public final class Locals { if (methods == null) { methods = new HashMap<>(); } - methods.put(new PainlessMethodKey(method.name, method.arguments.size()), method); + methods.put(PainlessLookupUtility.buildPainlessMethodKey(method.name, method.arguments.size()), method); // TODO: check result } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 7d84899b00e..57b18bc60da 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -19,19 +19,20 @@ package org.elasticsearch.painless.lookup; +import org.objectweb.asm.Type; + import java.lang.invoke.MethodHandle; import java.util.Collections; -import java.util.HashMap; import java.util.Map; public final class PainlessClass { public final String name; public final Class clazz; - public final org.objectweb.asm.Type type; + public final Type type; - public final Map constructors; - public final Map staticMethods; - public final Map methods; + public final Map constructors; + public final Map staticMethods; + public final Map methods; public final Map staticMembers; public final Map members; @@ -41,63 +42,25 @@ public final class PainlessClass { public final PainlessMethod functionalMethod; - PainlessClass(String name, Class clazz, org.objectweb.asm.Type type) { + PainlessClass(String name, Class clazz, Type type, + Map constructors, Map staticMethods, Map methods, + Map staticMembers, Map members, + Map getters, Map setters, + PainlessMethod functionalMethod) { this.name = name; this.clazz = clazz; this.type = type; - constructors = new HashMap<>(); - staticMethods = 
new HashMap<>(); - methods = new HashMap<>(); + this.constructors = Collections.unmodifiableMap(constructors); + this.staticMethods = Collections.unmodifiableMap(staticMethods); + this.methods = Collections.unmodifiableMap(methods); - staticMembers = new HashMap<>(); - members = new HashMap<>(); + this.staticMembers = Collections.unmodifiableMap(staticMembers); + this.members = Collections.unmodifiableMap(members); - getters = new HashMap<>(); - setters = new HashMap<>(); - - functionalMethod = null; - } - - private PainlessClass(PainlessClass struct, PainlessMethod functionalMethod) { - name = struct.name; - clazz = struct.clazz; - type = struct.type; - - constructors = Collections.unmodifiableMap(struct.constructors); - staticMethods = Collections.unmodifiableMap(struct.staticMethods); - methods = Collections.unmodifiableMap(struct.methods); - - staticMembers = Collections.unmodifiableMap(struct.staticMembers); - members = Collections.unmodifiableMap(struct.members); - - getters = Collections.unmodifiableMap(struct.getters); - setters = Collections.unmodifiableMap(struct.setters); + this.getters = Collections.unmodifiableMap(getters); + this.setters = Collections.unmodifiableMap(setters); this.functionalMethod = functionalMethod; } - - public PainlessClass freeze(PainlessMethod functionalMethod) { - return new PainlessClass(this, functionalMethod); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessClass struct = (PainlessClass)object; - - return name.equals(struct.name); - } - - @Override - public int hashCode() { - return name.hashCode(); - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java new file mode 100644 index 00000000000..0eda3660f0b --- /dev/null +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +import org.objectweb.asm.Type; + +import java.lang.invoke.MethodHandle; +import java.util.HashMap; +import java.util.Map; + +final class PainlessClassBuilder { + final String name; + final Class clazz; + final Type type; + + final Map constructors; + final Map staticMethods; + final Map methods; + + final Map staticMembers; + final Map members; + + final Map getters; + final Map setters; + + PainlessMethod functionalMethod; + + PainlessClassBuilder(String name, Class clazz, Type type) { + this.name = name; + this.clazz = clazz; + this.type = type; + + constructors = new HashMap<>(); + staticMethods = new HashMap<>(); + methods = new HashMap<>(); + + staticMembers = new HashMap<>(); + members = new HashMap<>(); + + getters = new HashMap<>(); + setters = new HashMap<>(); + + functionalMethod = null; + } + + PainlessClass build() { + return new PainlessClass(name, clazz, type, + constructors, staticMethods, methods, + staticMembers, members, + getters, setters, + functionalMethod); + } +} diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 5641eee1b5d..2150c0b210a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -37,6 +37,8 @@ import java.util.Map; import java.util.Stack; import java.util.regex.Pattern; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; + public class PainlessLookupBuilder { private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); @@ -60,16 +62,16 @@ public class PainlessLookupBuilder { } private final Map> painlessTypesToJavaClasses; - private final Map, PainlessClass> javaClassesToPainlessStructs; + private final Map, PainlessClassBuilder> javaClassesToPainlessClassBuilders; public PainlessLookupBuilder(List whitelists) { painlessTypesToJavaClasses = new HashMap<>(); - javaClassesToPainlessStructs = new HashMap<>(); + javaClassesToPainlessClassBuilders = new HashMap<>(); String origin = null; painlessTypesToJavaClasses.put("def", def.class); - javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class))); + javaClassesToPainlessClassBuilders.put(def.class, new PainlessClassBuilder("def", Object.class, Type.getType(Object.class))); try { // first iteration collects all the Painless type names that @@ -77,7 +79,8 @@ public class PainlessLookupBuilder { for (Whitelist whitelist : whitelists) { for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); + PainlessClassBuilder painlessStruct = + 
javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + @@ -87,8 +90,8 @@ public class PainlessLookupBuilder { origin = whitelistStruct.origin; addStruct(whitelist.javaClassLoader, whitelistStruct); - painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); - javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct); + painlessStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(painlessTypeName)); + javaClassesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); } } @@ -121,8 +124,8 @@ public class PainlessLookupBuilder { // goes through each Painless struct and determines the inheritance list, // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : javaClassesToPainlessStructs.keySet()) { - PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass); + for (Class javaClass : javaClassesToPainlessClassBuilders.keySet()) { + PainlessClassBuilder painlessStruct = javaClassesToPainlessClassBuilders.get(javaClass); List painlessSuperStructs = new ArrayList<>(); Class javaSuperClass = painlessStruct.clazz.getSuperclass(); @@ -133,7 +136,7 @@ public class PainlessLookupBuilder { // adds super classes to the inheritance list if (javaSuperClass != null && javaSuperClass.isInterface() == false) { while (javaSuperClass != null) { - PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass); + PainlessClassBuilder painlessSuperStruct = javaClassesToPainlessClassBuilders.get(javaSuperClass); if (painlessSuperStruct != null) { painlessSuperStructs.add(painlessSuperStruct.name); @@ -149,7 +152,7 @@ public class PainlessLookupBuilder { Class 
javaInterfaceLookup = javaInteraceLookups.pop(); for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface); + PainlessClassBuilder painlessInterfaceStruct = javaClassesToPainlessClassBuilders.get(javaSuperInterface); if (painlessInterfaceStruct != null) { String painlessInterfaceStructName = painlessInterfaceStruct.name; @@ -170,7 +173,7 @@ public class PainlessLookupBuilder { // copies methods and fields from Object into interface types if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class); + PainlessClassBuilder painlessObjectStruct = javaClassesToPainlessClassBuilders.get(Object.class); if (painlessObjectStruct != null) { copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); @@ -179,14 +182,9 @@ public class PainlessLookupBuilder { } // precompute runtime classes - for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) { + for (PainlessClassBuilder painlessStruct : javaClassesToPainlessClassBuilders.values()) { addRuntimeClass(painlessStruct); } - - // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) { - entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue()))); - } } private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) { @@ -223,12 +221,12 @@ public class PainlessLookupBuilder { } } - PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass); + PainlessClassBuilder existingStruct = javaClassesToPainlessClassBuilders.get(javaClass); if (existingStruct == null) { - PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); + 
PainlessClassBuilder struct = new PainlessClassBuilder(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); painlessTypesToJavaClasses.put(painlessTypeName, javaClass); - javaClassesToPainlessStructs.put(javaClass, struct); + javaClassesToPainlessClassBuilders.put(javaClass, struct); } else if (existingStruct.clazz.equals(javaClass) == false) { throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " + "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " + @@ -261,7 +259,7 @@ public class PainlessLookupBuilder { } private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -295,7 +293,7 @@ public class PainlessLookupBuilder { " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception); } - PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); + String painlessMethodKey = buildPainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); if (painlessConstructor == null) { @@ -321,7 +319,7 @@ public class PainlessLookupBuilder { } private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = 
javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -400,8 +398,8 @@ public class PainlessLookupBuilder { "and parameters " + whitelistMethod.painlessParameterTypeNames); } - PainlessMethodKey painlessMethodKey = - new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); + String painlessMethodKey = + buildPainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); @@ -459,7 +457,7 @@ public class PainlessLookupBuilder { } private void addField(String ownerStructName, WhitelistField whitelistField) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -540,14 +538,14 @@ public class PainlessLookupBuilder { } private void copyStruct(String struct, List children) { - final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct)); + final PainlessClassBuilder owner = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } for (int count = 0; count < children.size(); ++count) { - final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count))); + final PainlessClassBuilder child = 
javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -559,8 +557,8 @@ public class PainlessLookupBuilder { " is not a super type of owner struct [" + owner.name + "] in copy."); } - for (Map.Entry kvPair : child.methods.entrySet()) { - PainlessMethodKey methodKey = kvPair.getKey(); + for (Map.Entry kvPair : child.methods.entrySet()) { + String methodKey = kvPair.getKey(); PainlessMethod method = kvPair.getValue(); if (owner.methods.get(methodKey) == null) { // TODO: some of these are no longer valid or outright don't work @@ -625,10 +623,10 @@ public class PainlessLookupBuilder { /** * Precomputes a more efficient structure for dynamic method/field access. */ - private void addRuntimeClass(final PainlessClass struct) { + private void addRuntimeClass(final PainlessClassBuilder struct) { // add all getters/setters - for (Map.Entry method : struct.methods.entrySet()) { - String name = method.getKey().name; + for (Map.Entry method : struct.methods.entrySet()) { + String name = method.getValue().name; PainlessMethod m = method.getValue(); if (m.arguments.size() == 0 && @@ -668,7 +666,7 @@ public class PainlessLookupBuilder { } /** computes the functional interface method for a class, or returns null */ - private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { + private PainlessMethod computeFunctionalInterfaceMethod(PainlessClassBuilder clazz) { if (!clazz.clazz.isInterface()) { return null; } @@ -703,7 +701,7 @@ public class PainlessLookupBuilder { } // inspect the one method found from the reflection API, it should match the whitelist! 
java.lang.reflect.Method oneMethod = methods.get(0); - PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); + PainlessMethod painless = clazz.methods.get(buildPainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) { throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " + "method is not whitelisted!"); @@ -712,7 +710,15 @@ public class PainlessLookupBuilder { } public PainlessLookup build() { - return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs); + Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); + + // copy all structs to make them unmodifiable for outside users: + for (Map.Entry,PainlessClassBuilder> entry : javaClassesToPainlessClassBuilders.entrySet()) { + entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue()); + javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); + } + + return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessClasses); } public Class getJavaClassFromPainlessType(String painlessType) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java deleted file mode 100644 index 49413ab0c5f..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.lookup; - -import java.util.Objects; - -/** - * Key for looking up a method. - *

- * Methods are keyed on both name and arity, and can be overloaded once per arity. - * This allows signatures such as {@code String.indexOf(String) vs String.indexOf(String, int)}. - *

- * It is less flexible than full signature overloading where types can differ too, but - * better than just the name, and overloading types adds complexity to users, too. - */ -public final class PainlessMethodKey { - public final String name; - public final int arity; - - /** - * Create a new lookup key - * @param name name of the method - * @param arity number of parameters - */ - public PainlessMethodKey(String name, int arity) { - this.name = Objects.requireNonNull(name); - this.arity = arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + arity; - result = prime * result + name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PainlessMethodKey other = (PainlessMethodKey) obj; - if (arity != other.arity) return false; - if (!name.equals(other.name)) return false; - return true; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(name); - sb.append('/'); - sb.append(arity); - return sb.toString(); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index dfed0ca47b4..098c75386e1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import 
java.util.Objects; @@ -58,7 +58,7 @@ public final class ECallLocal extends AExpression { @Override void analyze(Locals locals) { - PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); + String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); method = locals.getMethod(methodKey); if (method == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index d4eddb059a8..92b14a885a1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.Objects; @@ -71,7 +70,8 @@ public final class EFunctionRef extends AExpression implements ILambda { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); } - PainlessMethod delegateMethod = locals.getMethod(new PainlessMethodKey(call, interfaceMethod.arguments.size())); + PainlessMethod delegateMethod = + locals.getMethod(PainlessLookupUtility.buildPainlessMethodKey(call, interfaceMethod.arguments.size())); if (delegateMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], function not found"); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 7e923e5f90f..e0af653d209 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; @@ -62,14 +62,15 @@ public final class EListInit extends AExpression { actual = ArrayList.class; - constructor = - locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors + .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("add", 1)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("add", 1)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index b350a758944..d81f08dc3cc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; @@ -68,14 +68,15 @@ public final class EMapInit extends AExpression { actual = HashMap.class; - constructor = - locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors + .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("put", 2)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index cf6f040c975..c0d4433f7fb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import 
org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.List; @@ -65,7 +65,7 @@ public final class ENewObj extends AExpression { } PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); - constructor = struct.constructors.get(new PainlessMethodKey("", arguments.size())); + constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", arguments.size())); if (constructor != null) { Class[] types = new Class[constructor.arguments.size()]; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 445c053347e..cd5d6483791 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -26,7 +26,6 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import java.util.List; @@ -77,7 +76,7 @@ public final class PCallInvoke extends AExpression { struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.getBoxedAnyType(prefix.actual)); } - PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); + String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); PainlessMethod method = prefix instanceof EStatic ? 
struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); if (method != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 3f2f8879564..b5df74358d3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import java.util.List; @@ -74,16 +73,16 @@ public final class PField extends AStoreable { if (field != null) { sub = new PSubField(location, field); } else { - PainlessMethod getter = struct.methods.get( - new PainlessMethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + PainlessMethod getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); if (getter == null) { - getter = struct.methods.get( - new PainlessMethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); } - PainlessMethod setter = struct.methods.get( - new PainlessMethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + PainlessMethod setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { sub = new PSubShortcut(location, value, 
PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), getter, setter); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 0a3ab142ddc..3841b1fece1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -25,8 +25,8 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -56,8 +56,8 @@ final class PSubListShortcut extends AStoreable { @Override void analyze(Locals locals) { - getter = struct.methods.get(new PainlessMethodKey("get", 1)); - setter = struct.methods.get(new PainlessMethodKey("set", 2)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); + setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("set", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1 || getter.arguments.get(0) != int.class)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index f71e2ac5d1f..13a3b9c9b94 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.Locals; import 
org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -55,8 +55,8 @@ final class PSubMapShortcut extends AStoreable { @Override void analyze(Locals locals) { - getter = struct.methods.get(new PainlessMethodKey("get", 1)); - setter = struct.methods.get(new PainlessMethodKey("put", 2)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); + setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1)) { throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + struct.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index cd473e2c84e..c354e78a961 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -30,8 +30,8 @@ import org.elasticsearch.painless.ScriptClassInfo; import org.elasticsearch.painless.SimpleChecksAdapter; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; @@ -165,12 +165,12 @@ public final class SSource extends AStatement { } 
public void analyze(PainlessLookup painlessLookup) { - Map methods = new HashMap<>(); + Map methods = new HashMap<>(); for (SFunction function : functions) { function.generateSignature(painlessLookup); - PainlessMethodKey key = new PainlessMethodKey(function.name, function.parameters.size()); + String key = PainlessLookupUtility.buildPainlessMethodKey(function.name, function.parameters.size()); if (methods.put(key, function.method) != null) { throw createError(new IllegalArgumentException("Duplicate functions with name [" + function.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index cec1297a4c4..798b30e2b6d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -29,7 +29,6 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -77,8 +76,8 @@ final class SSubEachIterable extends AStatement { if (expression.actual == def.class) { method = null; } else { - method = locals.getPainlessLookup(). 
- getPainlessStructFromJavaClass(expression.actual).methods.get(new PainlessMethodKey("iterator", 0)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(expression.actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("iterator", 0)); if (method == null) { throw createError(new IllegalArgumentException("Unable to create iterator for the type " + diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index 86d365e0fcc..cd3e4123e12 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.FeatureTest; import org.elasticsearch.painless.GenericElasticsearchScript; @@ -405,14 +405,14 @@ public class NodeToStringTests extends ESTestCase { public void testPSubCallInvoke() { Location l = new Location(getTestName(), 0); PainlessClass c = painlessLookup.getPainlessStructFromJavaClass(Integer.class); - PainlessMethod m = c.methods.get(new PainlessMethodKey("toString", 0)); + PainlessMethod m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("toString", 0)); PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList()); node.prefix = new EVariable(l, "a"); assertEquals("(PSubCallInvoke (EVariable a) toString)", node.toString()); 
assertEquals("(PSubNullSafeCallInvoke (PSubCallInvoke (EVariable a) toString))", new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - m = c.methods.get(new PainlessMethodKey("equals", 1)); + m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("equals", 1)); node = new PSubCallInvoke(l, m, null, singletonList(new EVariable(l, "b"))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubCallInvoke (EVariable a) equals (Args (EVariable b)))", node.toString()); @@ -502,8 +502,8 @@ public class NodeToStringTests extends ESTestCase { public void testPSubShortcut() { Location l = new Location(getTestName(), 0); PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(FeatureTest.class); - PainlessMethod getter = s.methods.get(new PainlessMethodKey("getX", 0)); - PainlessMethod setter = s.methods.get(new PainlessMethodKey("setX", 1)); + PainlessMethod getter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("getX", 0)); + PainlessMethod setter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("setX", 1)); PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter); node.prefix = new EVariable(l, "a"); assertEquals("(PSubShortcut (EVariable a) x)", node.toString()); From 91d8371325bfd4a6cbf88d4e77f4769f1d8bdea0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 17 Jul 2018 18:41:31 -0400 Subject: [PATCH 072/260] Build: Skip jar tests if jar disabled The shadow plugin disables the jar task but we still attempted to extract the jar to see if it had the right license and notice file. This skips the extraction and those tests if the jar is built for any reason which fixes projects that use the shadow plugin. 
--- build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/build.gradle b/build.gradle index ccbb6898dc4..90a9d88ac8b 100644 --- a/build.gradle +++ b/build.gradle @@ -574,6 +574,7 @@ subprojects { project -> commandLine "${->new File(rootProject.compilerJavaHome, 'bin/jar')}", 'xf', "${-> jarTask.outputs.files.singleFile}", 'META-INF/LICENSE.txt', 'META-INF/NOTICE.txt' workingDir destination + onlyIf {jarTask.enabled} doFirst { project.delete(destination) Files.createDirectories(destination) @@ -582,6 +583,7 @@ subprojects { project -> final Task checkNotice = project.task("verify${jarTask.name.capitalize()}Notice") { dependsOn extract + onlyIf {jarTask.enabled} doLast { final List noticeLines = Files.readAllLines(project.noticeFile.toPath()) final Path noticePath = extract.destination.resolve('META-INF/NOTICE.txt') @@ -592,6 +594,7 @@ subprojects { project -> final Task checkLicense = project.task("verify${jarTask.name.capitalize()}License") { dependsOn extract + onlyIf {jarTask.enabled} doLast { final List licenseLines = Files.readAllLines(project.licenseFile.toPath()) final Path licensePath = extract.destination.resolve('META-INF/LICENSE.txt') From 351bbb89063dc00dbea76f8e09300ede9d1be28c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 17 Jul 2018 20:25:27 -0400 Subject: [PATCH 073/260] Switch distribution to new style Requests (#30595) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `distribution/archives/integ-test-zip` project to use the new versions. 
--- .../test/rest/CreatedLocationHeaderIT.java | 24 +++++----- .../test/rest/NodeRestUsageIT.java | 44 +++++++++---------- .../test/rest/RequestsWithoutContentIT.java | 37 ++++++++-------- .../rest/WaitForRefreshAndCloseTests.java | 42 +++++++++--------- 4 files changed, 74 insertions(+), 73 deletions(-) diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java index 9c9b6af705a..71a41db80a2 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java @@ -19,14 +19,11 @@ package org.elasticsearch.test.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import java.io.IOException; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -49,26 +46,31 @@ public class CreatedLocationHeaderIT extends ESRestTestCase { } public void testUpsert() throws IOException { - locationTestCase(client().performRequest("POST", "test/test/1/_update", emptyMap(), new StringEntity("{" - + "\"doc\": {\"test\": \"test\"}," - + "\"doc_as_upsert\": true}", ContentType.APPLICATION_JSON))); + Request request = new Request("POST", "test/test/1/_update"); + request.setJsonEntity("{" + + "\"doc\": {\"test\": \"test\"}," + + "\"doc_as_upsert\": true}"); + locationTestCase(client().performRequest(request)); } private void locationTestCase(String method, String url) throws IOException { - locationTestCase(client().performRequest(method, url, emptyMap(), - new StringEntity("{\"test\": \"test\"}", 
ContentType.APPLICATION_JSON))); + final Request request = new Request(method, url); + request.setJsonEntity("{\"test\": \"test\"}"); + locationTestCase(client().performRequest(request)); // we have to delete the index otherwise the second indexing request will route to the single shard and not produce a 201 final Response response = client().performRequest(new Request("DELETE", "test")); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - locationTestCase(client().performRequest(method, url + "?routing=cat", emptyMap(), - new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON))); + final Request withRouting = new Request(method, url); + withRouting.addParameter("routing", "cat"); + withRouting.setJsonEntity("{\"test\": \"test\"}"); + locationTestCase(client().performRequest(withRouting)); } private void locationTestCase(Response response) throws IOException { assertEquals(201, response.getStatusLine().getStatusCode()); String location = response.getHeader("Location"); assertThat(location, startsWith("/test/test/")); - Response getResponse = client().performRequest("GET", location); + Response getResponse = client().performRequest(new Request("GET", location)); assertEquals(singletonMap("test", "test"), entityAsMap(getResponse).get("_source")); } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java index b94aa71b040..818037f68a1 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java @@ -19,13 +19,11 @@ package org.elasticsearch.test.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; import 
org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.Request; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -39,8 +37,8 @@ public class NodeRestUsageIT extends ESRestTestCase { @SuppressWarnings("unchecked") public void testWithRestUsage() throws IOException { // First get the current usage figures - Response beforeResponse = client().performRequest("GET", - randomFrom("_nodes/usage", "_nodes/usage/rest_actions", "_nodes/usage/_all")); + String path = randomFrom("_nodes/usage", "_nodes/usage/rest_actions", "_nodes/usage/_all"); + Response beforeResponse = client().performRequest(new Request("GET", path)); Map beforeResponseBodyMap = entityAsMap(beforeResponse); assertThat(beforeResponseBodyMap, notNullValue()); Map before_nodesMap = (Map) beforeResponseBodyMap.get("_nodes"); @@ -80,24 +78,24 @@ public class NodeRestUsageIT extends ESRestTestCase { } // Do some requests to get some rest usage stats - client().performRequest("PUT", "/test"); - client().performRequest("POST", "/test/doc/1", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/doc/2", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/doc/3", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("GET", "/test/_search"); - client().performRequest("POST", "/test/doc/4", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/_refresh"); - client().performRequest("GET", "/_cat/indices"); - client().performRequest("GET", "/_nodes"); - client().performRequest("GET", "/test/_search"); - client().performRequest("GET", "/_nodes/stats"); - client().performRequest("DELETE", "/test"); + 
client().performRequest(new Request("PUT", "/test")); + for (int i = 0; i < 3; i++) { + final Request index = new Request("POST", "/test/doc/1"); + index.setJsonEntity("{\"foo\": \"bar\"}"); + client().performRequest(index); + } + client().performRequest(new Request("GET", "/test/_search")); + final Request index4 = new Request("POST", "/test/doc/4"); + index4.setJsonEntity("{\"foo\": \"bar\"}"); + client().performRequest(index4); + client().performRequest(new Request("POST", "/test/_refresh")); + client().performRequest(new Request("GET", "/_cat/indices")); + client().performRequest(new Request("GET", "/_nodes")); + client().performRequest(new Request("GET", "/test/_search")); + client().performRequest(new Request("GET", "/_nodes/stats")); + client().performRequest(new Request("DELETE", "/test")); - Response response = client().performRequest("GET", "_nodes/usage"); + Response response = client().performRequest(new Request("GET", "_nodes/usage")); Map responseBodyMap = entityAsMap(response); assertThat(responseBodyMap, notNullValue()); Map _nodesMap = (Map) responseBodyMap.get("_nodes"); @@ -139,7 +137,7 @@ public class NodeRestUsageIT extends ESRestTestCase { public void testMetricsWithAll() throws IOException { ResponseException exception = expectThrows(ResponseException.class, - () -> client().performRequest("GET", "_nodes/usage/_all,rest_actions")); + () -> client().performRequest(new Request("GET", "_nodes/usage/_all,rest_actions"))); assertNotNull(exception); assertThat(exception.getMessage(), containsString("\"type\":\"illegal_argument_exception\"," + "\"reason\":\"request [_nodes/usage/_all,rest_actions] contains _all and individual metrics [_all,rest_actions]\"")); diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java index ce72af26628..a6fc7b9cce1 100644 --- 
a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.Request; import java.io.IOException; @@ -28,56 +29,56 @@ import static org.hamcrest.CoreMatchers.containsString; public class RequestsWithoutContentIT extends ESRestTestCase { public void testIndexMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/idx/type/123")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/idx/type/123"))); assertResponseException(responseException, "request body is required"); } public void testBulkMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/_bulk")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? 
"POST" : "PUT", "/_bulk"))); assertResponseException(responseException, "request body is required"); } public void testPutSettingsMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - "PUT", "/_settings")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request("PUT", "/_settings"))); assertResponseException(responseException, "request body is required"); } public void testPutMappingsMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/test_index/test_type/_mapping")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/test_index/test_type/_mapping"))); assertResponseException(responseException, "request body is required"); } public void testPutIndexTemplateMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "PUT" : "POST", "/_template/my_template")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "PUT" : "POST", "/_template/my_template"))); assertResponseException(responseException, "request body is required"); } public void testMultiSearchMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "GET", "/_msearch")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? 
"POST" : "GET", "/_msearch"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testPutPipelineMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - "PUT", "/_ingest/pipeline/my_pipeline")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request("PUT", "/_ingest/pipeline/my_pipeline"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testSimulatePipelineMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testPutScriptMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/_scripts/lang")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? 
"POST" : "PUT", "/_scripts/lang"))); assertResponseException(responseException, "request body is required"); } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java index 0b1ad2a6dd9..fab809a51bc 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java @@ -19,26 +19,21 @@ package org.elasticsearch.test.rest; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.client.Request; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.HashMap; import java.util.Locale; import java.util.Map; -import static java.util.Collections.emptyMap; - /** * Tests that wait for refresh is fired if the index is closed. 
*/ @@ -46,13 +41,14 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase { @Before public void setupIndex() throws IOException { try { - client().performRequest("DELETE", indexName()); + client().performRequest(new Request("DELETE", indexName())); } catch (ResponseException e) { // If we get an error, it should be because the index doesn't exist assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); } - client().performRequest("PUT", indexName(), emptyMap(), - new StringEntity("{\"settings\":{\"refresh_interval\":-1}}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", indexName()); + request.setJsonEntity("{\"settings\":{\"refresh_interval\":-1}}"); + client().performRequest(request); } @After @@ -69,17 +65,20 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase { } public void testIndexAndThenClose() throws Exception { - closeWhileListenerEngaged(start("PUT", "", new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON))); + closeWhileListenerEngaged(start("PUT", "", "{\"test\":\"test\"}")); } public void testUpdateAndThenClose() throws Exception { - client().performRequest("PUT", docPath(), emptyMap(), new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON)); - closeWhileListenerEngaged(start("POST", "/_update", - new StringEntity("{\"doc\":{\"name\":\"test\"}}", ContentType.APPLICATION_JSON))); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(request); + closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); } public void testDeleteAndThenClose() throws Exception { - client().performRequest("PUT", docPath(), emptyMap(), new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(request); closeWhileListenerEngaged(start("DELETE", "", 
null)); } @@ -88,7 +87,7 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase { assertBusy(() -> { Map stats; try { - stats = entityAsMap(client().performRequest("GET", indexName() + "/_stats/refresh")); + stats = entityAsMap(client().performRequest(new Request("GET", indexName() + "/_stats/refresh"))); } catch (IOException e) { throw new RuntimeException(e); } @@ -105,18 +104,19 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase { }); // Close the index. That should flush the listener. - client().performRequest("POST", indexName() + "/_close"); + client().performRequest(new Request("POST", indexName() + "/_close")); // The request shouldn't fail. It certainly shouldn't hang. future.get(); } - private ActionFuture start(String method, String path, HttpEntity body) { + private ActionFuture start(String method, String path, String body) { PlainActionFuture future = new PlainActionFuture<>(); - Map params = new HashMap<>(); - params.put("refresh", "wait_for"); - params.put("error_trace", ""); - client().performRequestAsync(method, docPath() + path, params, body, new ResponseListener() { + Request request = new Request(method, docPath() + path); + request.addParameter("refresh", "wait_for"); + request.addParameter("error_trace", ""); + request.setJsonEntity(body); + client().performRequestAsync(request, new ResponseListener() { @Override public void onSuccess(Response response) { try { From df1380b8d3267d70771a640c7bac5a04fb63a8e1 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 17 Jul 2018 21:59:48 -0400 Subject: [PATCH 074/260] Remove versionType from translog (#31945) With the introduction of sequence number, we no longer use versionType to resolve out of order collision in replication and recovery requests. This PR removes removes the versionType from translog. We can only remove it in 7.0 because it is still required in a mixed cluster between 6.x and 5.x. 
--- .../action/bulk/TransportShardBulkAction.java | 5 +- .../org/elasticsearch/index/VersionType.java | 15 ----- .../elasticsearch/index/engine/Engine.java | 2 + .../index/engine/InternalEngine.java | 25 +------- .../elasticsearch/index/shard/IndexShard.java | 23 ++++--- .../index/translog/Translog.java | 61 ++++++++----------- .../index/translog/TranslogWriter.java | 8 ++- .../resync/ResyncReplicationRequestTests.java | 4 +- .../index/engine/InternalEngineTests.java | 30 ++++----- .../RecoveryDuringReplicationTests.java | 1 - .../index/shard/IndexShardTests.java | 27 ++++---- .../index/translog/TranslogTests.java | 25 ++++---- .../indices/recovery/RecoveryTests.java | 10 +-- .../index/engine/EngineTestCase.java | 9 +-- .../index/engine/TranslogHandler.java | 6 +- .../index/shard/IndexShardTestCase.java | 4 +- 16 files changed, 99 insertions(+), 156 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a78421a2328..15a98077eac 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -523,13 +523,12 @@ public class TransportShardBulkAction extends TransportWriteAction 0L || version == Versions.MATCH_ANY; } - - @Override - public VersionType versionTypeForReplicationAndRecovery() { - // replicas get the version from the primary after increment. The same version is stored in - // the transaction log. -> the should use the external semantics. - return EXTERNAL; - } }, EXTERNAL((byte) 1) { @Override @@ -333,14 +326,6 @@ public enum VersionType implements Writeable { */ public abstract boolean validateVersionForReads(long version); - /** - * Some version types require different semantics for primary and replicas. This version allows - * the type to override the default behavior. 
- */ - public VersionType versionTypeForReplicationAndRecovery() { - return this; - } - public static VersionType fromString(String versionType) { if ("internal".equals(versionType)) { return INTERNAL; diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 8a560e02fe4..53a7baa60f6 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1168,6 +1168,7 @@ public abstract class Engine implements Closeable { public Index(Term uid, ParsedDocument doc, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime, long autoGeneratedIdTimestamp, boolean isRetry) { super(uid, seqNo, primaryTerm, version, versionType, origin, startTime); + assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin; this.doc = doc; this.isRetry = isRetry; this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; @@ -1245,6 +1246,7 @@ public abstract class Engine implements Closeable { public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime) { super(uid, seqNo, primaryTerm, version, versionType, origin, startTime); + assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin; this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 92c64d415ad..bdcfb2fc731 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -691,7 +691,7 @@ public class InternalEngine 
extends Engine { return true; case PEER_RECOVERY: case REPLICA: - assert index.version() == 1 && index.versionType() == VersionType.EXTERNAL + assert index.version() == 1 && index.versionType() == null : "version: " + index.version() + " type: " + index.versionType(); return true; case LOCAL_TRANSLOG_RECOVERY: @@ -704,20 +704,6 @@ public class InternalEngine extends Engine { return false; } - private boolean assertVersionType(final Engine.Operation operation) { - if (operation.origin() == Operation.Origin.REPLICA || - operation.origin() == Operation.Origin.PEER_RECOVERY || - operation.origin() == Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { - // ensure that replica operation has expected version type for replication - // ensure that versionTypeForReplicationAndRecovery is idempotent - assert operation.versionType() == operation.versionType().versionTypeForReplicationAndRecovery() - : "unexpected version type in request from [" + operation.origin().name() + "] " + - "found [" + operation.versionType().name() + "] " + - "expected [" + operation.versionType().versionTypeForReplicationAndRecovery().name() + "]"; - } - return true; - } - private boolean assertIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) { if (origin == Operation.Origin.PRIMARY) { assert assertOriginPrimarySequenceNumber(seqNo); @@ -757,7 +743,6 @@ public class InternalEngine extends Engine { try (ReleasableLock releasableLock = readLock.acquire()) { ensureOpen(); assert assertIncomingSequenceNumber(index.origin(), index.seqNo()); - assert assertVersionType(index); try (Releasable ignored = versionMap.acquireLock(index.uid().bytes()); Releasable indexThrottle = doThrottle ? 
() -> {} : throttle.acquireThrottle()) { lastWriteNanos = index.startTime(); @@ -860,9 +845,6 @@ public class InternalEngine extends Engine { "max_seqno non-append-only [" + maxSeqNoOfNonAppendOnlyOperations.get() + "], seqno of index [" + index.seqNo() + "]"; } versionMap.enforceSafeAccess(); - // drop out of order operations - assert index.versionType().versionTypeForReplicationAndRecovery() == index.versionType() : - "resolving out of order delivery based on versioning but version type isn't fit for it. got [" + index.versionType() + "]"; // unlike the primary, replicas don't really care to about creation status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return false for the created flag in favor of code simplicity @@ -1096,7 +1078,6 @@ public class InternalEngine extends Engine { public DeleteResult delete(Delete delete) throws IOException { versionMap.enforceSafeAccess(); assert Objects.equals(delete.uid().field(), IdFieldMapper.NAME) : delete.uid().field(); - assert assertVersionType(delete); assert assertIncomingSequenceNumber(delete.origin(), delete.seqNo()); final DeleteResult deleteResult; // NOTE: we don't throttle this when merges fall behind because delete-by-id does not create new segments: @@ -1149,10 +1130,6 @@ public class InternalEngine extends Engine { private DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws IOException { assert delete.origin() != Operation.Origin.PRIMARY : "planing as primary but got " + delete.origin(); - // drop out of order operations - assert delete.versionType().versionTypeForReplicationAndRecovery() == delete.versionType() : - "resolving out of order delivery based on versioning but version type isn't fit for it. 
got [" - + delete.versionType() + "]"; maxSeqNoOfNonAppendOnlyOperations.updateAndGet(curr -> Math.max(delete.seqNo(), curr)); assert maxSeqNoOfNonAppendOnlyOperations.get() >= delete.seqNo() : "max_seqno of non-append-only was not updated;" + "max_seqno non-append-only [" + maxSeqNoOfNonAppendOnlyOperations.get() + "], seqno of delete [" + delete.seqNo() + "]"; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 5bd8f9abc6e..b07e22875e8 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -645,22 +645,22 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl public Engine.IndexResult applyIndexOperationOnPrimary(long version, VersionType versionType, SourceToParse sourceToParse, long autoGeneratedTimestamp, boolean isRetry) throws IOException { + assert versionType.validateVersionForWrites(version); return applyIndexOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, versionType, autoGeneratedTimestamp, isRetry, Engine.Operation.Origin.PRIMARY, sourceToParse); } - public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, VersionType versionType, - long autoGeneratedTimeStamp, boolean isRetry, SourceToParse sourceToParse) + public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, long autoGeneratedTimeStamp, + boolean isRetry, SourceToParse sourceToParse) throws IOException { - return applyIndexOperation(seqNo, primaryTerm, version, versionType, autoGeneratedTimeStamp, isRetry, + return applyIndexOperation(seqNo, primaryTerm, version, null, autoGeneratedTimeStamp, isRetry, Engine.Operation.Origin.REPLICA, sourceToParse); } - private Engine.IndexResult applyIndexOperation(long seqNo, long opPrimaryTerm, long version, VersionType versionType, + private Engine.IndexResult 
applyIndexOperation(long seqNo, long opPrimaryTerm, long version, @Nullable VersionType versionType, long autoGeneratedTimeStamp, boolean isRetry, Engine.Operation.Origin origin, SourceToParse sourceToParse) throws IOException { assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; - assert versionType.validateVersionForWrites(version); ensureWriteAllowed(origin); Engine.Index operation; try { @@ -736,19 +736,18 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType) throws IOException { + assert versionType.validateVersionForWrites(version); return applyDeleteOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, type, id, versionType, Engine.Operation.Origin.PRIMARY); } - public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id, - VersionType versionType) throws IOException { - return applyDeleteOperation(seqNo, primaryTerm, version, type, id, versionType, Engine.Operation.Origin.REPLICA); + public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id) throws IOException { + return applyDeleteOperation(seqNo, primaryTerm, version, type, id, null, Engine.Operation.Origin.REPLICA); } private Engine.DeleteResult applyDeleteOperation(long seqNo, long opPrimaryTerm, long version, String type, String id, - VersionType versionType, Engine.Operation.Origin origin) throws IOException { + @Nullable VersionType versionType, Engine.Operation.Origin origin) throws IOException { assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; - assert versionType.validateVersionForWrites(version); ensureWriteAllowed(origin); // When there is a single type, the unique identifier is only composed of the _id, // 
so there is no way to differenciate foo#1 from bar#1. This is especially an issue @@ -1211,14 +1210,14 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl // we set canHaveDuplicates to true all the time such that we de-optimze the translog case and ensure that all // autoGeneratedID docs that are coming from the primary are updated correctly. result = applyIndexOperation(index.seqNo(), index.primaryTerm(), index.version(), - index.versionType().versionTypeForReplicationAndRecovery(), index.getAutoGeneratedIdTimestamp(), true, origin, + null, index.getAutoGeneratedIdTimestamp(), true, origin, source(shardId.getIndexName(), index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())).routing(index.routing())); break; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; result = applyDeleteOperation(delete.seqNo(), delete.primaryTerm(), delete.version(), delete.type(), delete.id(), - delete.versionType().versionTypeForReplicationAndRecovery(), origin); + null, origin); break; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 63055d933e4..31404b7874a 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1011,7 +1011,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public static final int FORMAT_6_0 = 8; // since 6.0.0 public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 - public static final int SERIALIZATION_FORMAT = FORMAT_NO_PARENT; + public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; private final String id; private final long autoGeneratedIdTimestamp; @@ -1019,7 
+1020,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC private final long seqNo; private final long primaryTerm; private final long version; - private final VersionType versionType; private final BytesReference source; private final String routing; @@ -1034,8 +1034,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC in.readOptionalString(); // _parent } this.version = in.readLong(); - this.versionType = VersionType.fromValue(in.readByte()); - assert versionType.validateVersionForWrites(this.version) : "invalid version for writes: " + this.version; + if (format < FORMAT_NO_VERSION_TYPE) { + in.readByte(); // _version_type + } this.autoGeneratedIdTimestamp = in.readLong(); seqNo = in.readLong(); primaryTerm = in.readLong(); @@ -1049,15 +1050,14 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC this.seqNo = indexResult.getSeqNo(); this.primaryTerm = index.primaryTerm(); this.version = indexResult.getVersion(); - this.versionType = index.versionType(); this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) { - this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, VersionType.INTERNAL, source, null, -1); + this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); } - public Index(String type, String id, long seqNo, long primaryTerm, long version, VersionType versionType, + public Index(String type, String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) { this.type = type; this.id = id; @@ -1065,7 +1065,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; - this.versionType = versionType; this.routing = routing; this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; } @@ -1110,24 
+1109,22 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return this.version; } - public VersionType versionType() { - return versionType; - } - @Override public Source getSource() { return new Source(source, routing); } private void write(final StreamOutput out) throws IOException { - out.writeVInt(SERIALIZATION_FORMAT); + final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? SERIALIZATION_FORMAT : FORMAT_6_0; + out.writeVInt(format); out.writeString(id); out.writeString(type); out.writeBytesReference(source); out.writeOptionalString(routing); out.writeLong(version); - - out.writeByte(versionType.getValue()); + if (format < FORMAT_NO_VERSION_TYPE) { + out.writeByte(VersionType.EXTERNAL.getValue()); + } out.writeLong(autoGeneratedIdTimestamp); out.writeLong(seqNo); out.writeLong(primaryTerm); @@ -1149,7 +1146,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC primaryTerm != index.primaryTerm || id.equals(index.id) == false || type.equals(index.type) == false || - versionType != index.versionType || autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp || source.equals(index.source) == false) { return false; @@ -1168,7 +1164,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); - result = 31 * result + versionType.hashCode(); result = 31 * result + source.hashCode(); result = 31 * result + (routing != null ? 
routing.hashCode() : 0); result = 31 * result + Long.hashCode(autoGeneratedIdTimestamp); @@ -1194,14 +1189,15 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public static class Delete implements Operation { private static final int FORMAT_6_0 = 4; // 6.0 - * - public static final int SERIALIZATION_FORMAT = FORMAT_6_0; + public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 + public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; private final String type, id; private final Term uid; private final long seqNo; private final long primaryTerm; private final long version; - private final VersionType versionType; private Delete(final StreamInput in) throws IOException { final int format = in.readVInt();// SERIALIZATION_FORMAT @@ -1210,29 +1206,29 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC id = in.readString(); uid = new Term(in.readString(), in.readBytesRef()); this.version = in.readLong(); - this.versionType = VersionType.fromValue(in.readByte()); - assert versionType.validateVersionForWrites(this.version); + if (format < FORMAT_NO_VERSION_TYPE) { + in.readByte(); // versionType + } seqNo = in.readLong(); primaryTerm = in.readLong(); } public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) { - this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion(), delete.versionType()); + this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); } /** utility for testing */ public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) { - this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY, VersionType.INTERNAL); + this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); } - public Delete(String type, String id, Term uid, long seqNo, 
long primaryTerm, long version, VersionType versionType) { + public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) { this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; - this.versionType = versionType; } @Override @@ -1271,23 +1267,22 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return this.version; } - public VersionType versionType() { - return this.versionType; - } - @Override public Source getSource() { throw new IllegalStateException("trying to read doc source from delete operation"); } private void write(final StreamOutput out) throws IOException { - out.writeVInt(SERIALIZATION_FORMAT); + final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? SERIALIZATION_FORMAT : FORMAT_6_0; + out.writeVInt(format); out.writeString(type); out.writeString(id); out.writeString(uid.field()); out.writeBytesRef(uid.bytes()); out.writeLong(version); - out.writeByte(versionType.getValue()); + if (format < FORMAT_NO_VERSION_TYPE) { + out.writeByte(VersionType.EXTERNAL.getValue()); + } out.writeLong(seqNo); out.writeLong(primaryTerm); } @@ -1306,8 +1301,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm && - uid.equals(delete.uid) && - versionType == delete.versionType; + uid.equals(delete.uid); } @Override @@ -1316,7 +1310,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); - result = 31 * result + versionType.hashCode(); return result; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java 
b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index b89b21c5258..c135facc67f 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -202,9 +202,11 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { if (previous.v1().equals(data) == false) { Translog.Operation newOp = Translog.readOperation(new BufferedChecksumStreamInput(data.streamInput())); Translog.Operation prvOp = Translog.readOperation(new BufferedChecksumStreamInput(previous.v1().streamInput())); - throw new AssertionError( - "seqNo [" + seqNo + "] was processed twice in generation [" + generation + "], with different data. " + - "prvOp [" + prvOp + "], newOp [" + newOp + "]", previous.v2()); + if (newOp.equals(prvOp) == false) { + throw new AssertionError( + "seqNo [" + seqNo + "] was processed twice in generation [" + generation + "], with different data. " + + "prvOp [" + prvOp + "], newOp [" + newOp + "]", previous.v2()); + } } } else { seenSequenceNumbers.put(seqNo, diff --git a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java index 914c2b87422..15b8e1c99d2 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.action.resync; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.Index; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; @@ -38,7 +36,7 
@@ public class ResyncReplicationRequestTests extends ESTestCase { public void testSerialization() throws IOException { final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8")); final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), - Versions.MATCH_ANY, VersionType.INTERNAL, bytes, null, -1); + randomNonNegativeLong(), bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index}); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 2e89a66805c..87b63dfdef8 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1183,7 +1183,7 @@ public class InternalEngineTests extends EngineTestCase { assertThat(indexResult.getVersion(), equalTo(1L)); create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(), - create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); } @@ -1197,7 +1197,7 @@ public class InternalEngineTests extends EngineTestCase { create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(), - create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); assertTrue(indexResult.isCreated()); @@ -1216,7 +1216,7 @@ public class InternalEngineTests extends EngineTestCase { update = new Engine.Index(newUid(doc), doc, updateResult.getSeqNo(), 
update.primaryTerm(), updateResult.getVersion(), - update.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); updateResult = replicaEngine.index(update); assertThat(updateResult.getVersion(), equalTo(2L)); assertFalse(updateResult.isCreated()); @@ -1269,7 +1269,7 @@ public class InternalEngineTests extends EngineTestCase { Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); } @@ -1418,7 +1418,7 @@ public class InternalEngineTests extends EngineTestCase { forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, - forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, + forReplica ? null : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis(), -1, false ); @@ -1427,7 +1427,7 @@ public class InternalEngineTests extends EngineTestCase { forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, - forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, + forReplica ? null : versionType, forReplica ? 
REPLICA : PRIMARY, System.currentTimeMillis()); } @@ -3221,7 +3221,7 @@ public class InternalEngineTests extends EngineTestCase { Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); indexResult = replicaEngine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -3235,7 +3235,7 @@ public class InternalEngineTests extends EngineTestCase { assertEquals(1, topDocs.totalHits); } - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); indexResult = replicaEngine.index(index); assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.SUCCESS)); replicaEngine.refresh("test"); @@ -3255,7 +3255,7 @@ public class InternalEngineTests extends EngineTestCase { Engine.IndexResult result = engine.index(firstIndexRequest); assertThat(result.getVersion(), equalTo(1L)); - Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), firstIndexRequest.primaryTerm(), result.getVersion(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, 
result.getSeqNo(), firstIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); Engine.IndexResult indexReplicaResult = replicaEngine.index(firstIndexRequestReplica); assertThat(indexReplicaResult.getVersion(), equalTo(1L)); @@ -3269,7 +3269,7 @@ public class InternalEngineTests extends EngineTestCase { assertEquals(1, topDocs.totalHits); } - Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); replicaEngine.index(secondIndexRequestReplica); replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { @@ -3292,7 +3292,7 @@ public class InternalEngineTests extends EngineTestCase { } public Engine.Index appendOnlyReplica(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, final long seqNo) { - return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, VersionType.EXTERNAL, + return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, null, Engine.Operation.Origin.REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, retry); } @@ -3694,7 +3694,7 @@ public class InternalEngineTests extends EngineTestCase { sequenceNumberSupplier.getAsLong(), 1, i, - VersionType.EXTERNAL, + origin == PRIMARY ? VersionType.EXTERNAL : null, origin, System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -3708,7 +3708,7 @@ public class InternalEngineTests extends EngineTestCase { sequenceNumberSupplier.getAsLong(), 1, i, - VersionType.EXTERNAL, + origin == PRIMARY ? 
VersionType.EXTERNAL : null, origin, System.nanoTime()); operations.add(delete); @@ -3928,7 +3928,7 @@ public class InternalEngineTests extends EngineTestCase { final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); final Term uid = newUid(doc); final long time = System.nanoTime(); - actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, VersionType.EXTERNAL, REPLICA, time, time, false)); + actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, null, REPLICA, time, time, false)); if (rarely()) { actualEngine.rollTranslogGeneration(); } @@ -4686,7 +4686,7 @@ public class InternalEngineTests extends EngineTestCase { for (int i = 0; i < seqNos.size(); i++) { ParsedDocument doc = testParsedDocument(Long.toString(seqNos.get(i)), null, testDocument(), new BytesArray("{}"), null); Engine.Index index = new Engine.Index(newUid(doc), doc, seqNos.get(i), 0, - 1, VersionType.EXTERNAL, REPLICA, System.nanoTime(), -1, false); + 1, null, REPLICA, System.nanoTime(), -1, false); engine.index(index); if (randomBoolean()) { engine.flush(); diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index ee97ba14fe0..f01d5e54a2e 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -186,7 +186,6 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC remainingReplica.applyIndexOperationOnReplica( remainingReplica.getLocalCheckpoint() + 1, 1, - VersionType.EXTERNAL, randomNonNegativeLong(), false, SourceToParse.source("index", "type", "replica", new BytesArray("{}"), XContentType.JSON)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java 
b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index ac52378fc6b..2e07ec950a5 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -72,7 +72,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; @@ -1545,17 +1544,17 @@ public class IndexShardTests extends IndexShardTestCase { * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed. */ final IndexShard shard = newStartedShard(false); - shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id", VersionType.EXTERNAL); + shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation - shard.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)); - shard.applyIndexOperationOnReplica(3, 3, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(3, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery. 
shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); - shard.applyIndexOperationOnReplica(2, 3, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(2, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)); - shard.applyIndexOperationOnReplica(5, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; @@ -1646,8 +1645,7 @@ public class IndexShardTests extends IndexShardTestCase { updateMappings(otherShard, shard.indexSettings().getIndexMetaData()); SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), "_doc", "1", new BytesArray("{}"), XContentType.JSON); - otherShard.applyIndexOperationOnReplica(1, 1, - VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + otherShard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); final ShardRouting primaryShardRouting = shard.routingEntry(); IndexShard newShard = reinitShard(otherShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, @@ -1763,18 +1761,18 @@ public class IndexShardTests extends IndexShardTestCase { final IndexShard shard = newStartedShard(false); final String indexName = shard.shardId().getIndexName(); // Index #0, index #1 - shard.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); 
shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here. - shard.applyIndexOperationOnReplica(1, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1")); // Simulate resync (without rollback): Noop #1, index #2 acquireReplicaOperationPermitBlockingly(shard, shard.primaryTerm + 1); shard.markSeqNoAsNoop(1, "test"); - shard.applyIndexOperationOnReplica(2, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2")); @@ -2104,11 +2102,11 @@ public class IndexShardTests extends IndexShardTestCase { int numCorruptEntries = 0; for (int i = 0; i < numTotalEntries; i++) { if (randomBoolean()) { - operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1, VersionType.INTERNAL, + operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1, "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), null, -1)); } else { // corrupt entry - operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1, VersionType.INTERNAL, + operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1, "{\"foo\" : \"bar}".getBytes(Charset.forName("UTF-8")), null, -1)); numCorruptEntries++; } @@ -2603,8 +2601,7 @@ public class IndexShardTests extends IndexShardTestCase { final String id = Integer.toString(i); SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", 
id, new BytesArray("{}"), XContentType.JSON); - indexShard.applyIndexOperationOnReplica(i, - 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + indexShard.applyIndexOperationOnReplica(i, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (!gap && i == localCheckpoint + 1) { localCheckpoint++; } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index dbbb38090bc..b255238c864 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -416,9 +416,9 @@ public class TranslogTests extends ESTestCase { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(1)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(163L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(162L)); assertThat(stats.getUncommittedOperations(), equalTo(1)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(163L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(162L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } @@ -426,9 +426,9 @@ public class TranslogTests extends ESTestCase { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(2)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(212L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(210L)); assertThat(stats.getUncommittedOperations(), equalTo(2)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(212L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(210L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } @@ -436,9 +436,9 @@ public class TranslogTests extends ESTestCase { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(3)); - 
assertThat(stats.getTranslogSizeInBytes(), equalTo(261L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(258L)); assertThat(stats.getUncommittedOperations(), equalTo(3)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(261L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(258L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } @@ -446,13 +446,13 @@ public class TranslogTests extends ESTestCase { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(303L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(300L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(303L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } - final long expectedSizeInBytes = 358L; + final long expectedSizeInBytes = 355L; translog.rollGeneration(); { final TranslogStats stats = stats(); @@ -725,14 +725,12 @@ public class TranslogTests extends ESTestCase { assertEquals(expIndexOp.type(), indexOp.type()); assertEquals(expIndexOp.source(), indexOp.source()); assertEquals(expIndexOp.version(), indexOp.version()); - assertEquals(expIndexOp.versionType(), indexOp.versionType()); break; case DELETE: Translog.Delete delOp = (Translog.Delete) op; Translog.Delete expDelOp = (Translog.Delete) expectedOp; assertEquals(expDelOp.uid(), delOp.uid()); assertEquals(expDelOp.version(), delOp.version()); - assertEquals(expDelOp.versionType(), delOp.versionType()); break; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) op; @@ -1478,7 +1476,7 @@ public class TranslogTests extends ESTestCase { try (Translog ignored = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)) { fail("corrupted"); } catch (IllegalStateException ex) { - assertEquals("Checkpoint file 
translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3080, " + + assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3025, " + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} but got: Checkpoint{offset=0, numOps=0, " + "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0, trimmedAboveSeqNo=-2}", ex.getMessage()); } @@ -1842,8 +1840,7 @@ public class TranslogTests extends ESTestCase { new Term("_uid", threadId + "_" + opCount), seqNoGenerator.getAndIncrement(), primaryTerm.get(), - 1 + randomInt(100000), - randomFrom(VersionType.values())); + 1 + randomInt(100000)); break; case NO_OP: op = new Translog.NoOp(seqNoGenerator.getAndIncrement(), primaryTerm.get(), randomAlphaOfLength(16)); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 537409f35d1..e7606328c76 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -122,22 +122,22 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 - orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id", VersionType.EXTERNAL); + orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); getTranslog(orgReplica).rollGeneration(); // isolate the delete in it's own generation // index #0 - orgReplica.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)); // index #3 - 
orgReplica.applyIndexOperationOnReplica(3, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(3, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); // index #2 - orgReplica.applyIndexOperationOnReplica(2, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)); orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); // index #5 -> force NoOp #4. - orgReplica.applyIndexOperationOnReplica(5, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index a23e29b0bcd..f2652224549 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -52,7 +52,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.IdFieldMapper; import 
org.elasticsearch.index.mapper.Mapping; @@ -493,14 +492,12 @@ public abstract class EngineTestCase extends ESTestCase { protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long seqNo, boolean isRetry) { - return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, VersionType.EXTERNAL, - Engine.Operation.Origin.REPLICA, System.nanoTime(), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); + return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, null, Engine.Operation.Origin.REPLICA, + System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); } protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { - return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, VersionType.EXTERNAL, - Engine.Operation.Origin.REPLICA, startTime); + return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, Engine.Operation.Origin.REPLICA, startTime); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index 53fe89ac17e..9999a3b3748 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -124,14 +124,12 @@ public class TranslogHandler implements EngineConfig.TranslogRecoveryRunner { source(indexName, index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())) .routing(index.routing()), index.seqNo(), index.primaryTerm(), - index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, - index.getAutoGeneratedIdTimestamp(), true); + index.version(), null, origin, index.getAutoGeneratedIdTimestamp(), true); return engineIndex; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; final Engine.Delete engineDelete = new 
Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), - delete.primaryTerm(), delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), - origin, System.nanoTime()); + delete.primaryTerm(), delete.version(), null, origin, System.nanoTime()); return engineDelete; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 5a8e91841c5..e4849be20e1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -573,7 +573,7 @@ public abstract class IndexShardTestCase extends ESTestCase { shard.getLocalCheckpoint()); } else { result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, - VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { throw new TransportReplicationAction.RetryOnReplicaException(shard.shardId, "Mappings are not available on the replica yet, triggered update: " + result.getRequiredMappingUpdate()); @@ -591,7 +591,7 @@ public abstract class IndexShardTestCase extends ESTestCase { if (shard.routingEntry().primary()) { return shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL); } else { - return shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id, VersionType.EXTERNAL); + return shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id); } } From 93d7468f3a49fba26a4176476b8f219e86ecdef0 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 17 Jul 2018 18:06:16 +0200 Subject: [PATCH 075/260] 
ESIndexLevelReplicationTestCase doesn't support replicated failures but it's good to know what they are Sometimes we have a test failure that hits an `UnsupportedOperationException` in this infrastructure. When debugging you want to know what caused this unexpected failure, but right now we're silent about it. This commit adds some information to the `UnsupportedOperationException` Relates to #32127 --- .../index/replication/ESIndexLevelReplicationTestCase.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index ab18e359458..9de88216822 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -477,7 +477,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase @Override public void failShard(String message, Exception exception) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("failing a primary isn't supported. failure: " + message, exception); } @Override @@ -550,13 +550,13 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase public void failShardIfNeeded(ShardRouting replica, String message, Exception exception, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("failing shard " + replica + " isn't supported. 
failure: " + message, exception); } @Override public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't mark " + shardId + ", aid [" + allocationId + "] as stale"); } } From 487cfc3b3f9df22f46e112461429578732844903 Mon Sep 17 00:00:00 2001 From: Toby McLaughlin Date: Wed, 18 Jul 2018 17:07:31 +1000 Subject: [PATCH 076/260] [DOCS] Update TLS on Docker for 6.3 (#32114) Remove references to the `platinum` image and add a self-generated trial licence to the example for TLS on Docker. Fixes elastic/elasticsearch-docker#176 --- .../configuring-tls-docker.asciidoc | 29 ++++++++++++++----- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc index d93d4e523d9..49913382482 100644 --- a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc +++ b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc @@ -1,12 +1,13 @@ [role="xpack"] [[configuring-tls-docker]] -=== Encrypting Communications in an {es} Docker Image +=== Encrypting Communications in an {es} Docker Container Starting with version 6.0.0, {security} (Gold, Platinum or Enterprise subscriptions) https://www.elastic.co/guide/en/elasticsearch/reference/6.0/breaking-6.0.0-xes.html[requires SSL/TLS] encryption for the transport networking layer. This section demonstrates an easy path to get started with SSL/TLS for both -HTTPS and transport using the `elasticsearch-platinum` docker image. +HTTPS and transport using the {es} Docker image. The example uses +Docker Compose to manage the containers. 
For further details, please refer to {xpack-ref}/encrypting-communications.html[Encrypting Communications] and @@ -17,7 +18,7 @@ https://www.elastic.co/subscriptions[available subscriptions]. <>. -Inside a new, empty, directory create the following **four files**: +Inside a new, empty directory, create the following four files: `instances.yml`: ["source","yaml"] @@ -29,6 +30,7 @@ instances: - localhost ip: - 127.0.0.1 + - name: es02 dns: - es02 @@ -60,6 +62,7 @@ ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] ---- version: '2.2' + services: create_certs: container_name: create_certs @@ -96,6 +99,7 @@ ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] ---- version: '2.2' + services: es01: container_name: es01 @@ -105,9 +109,11 @@ services: - discovery.zen.minimum_master_nodes=2 - ELASTIC_PASSWORD=$ELASTIC_PASSWORD <1> - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.license.self_generated.type=trial <2> + - xpack.security.enabled=true - xpack.security.http.ssl.enabled=true - xpack.security.transport.ssl.enabled=true - - xpack.security.transport.ssl.verification_mode=certificate <2> + - xpack.security.transport.ssl.verification_mode=certificate <3> - xpack.ssl.certificate_authorities=$CERTS_DIR/ca/ca.crt - xpack.ssl.certificate=$CERTS_DIR/es01/es01.crt - xpack.ssl.key=$CERTS_DIR/es01/es01.key @@ -119,15 +125,18 @@ services: interval: 30s timeout: 10s retries: 5 + es02: container_name: es02 - image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:{version} environment: - node.name=es02 - discovery.zen.minimum_master_nodes=2 - ELASTIC_PASSWORD=$ELASTIC_PASSWORD - discovery.zen.ping.unicast.hosts=es01 - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.license.self_generated.type=trial + - xpack.security.enabled=true - xpack.security.http.ssl.enabled=true - xpack.security.transport.ssl.enabled=true - 
xpack.security.transport.ssl.verification_mode=certificate @@ -135,16 +144,20 @@ services: - xpack.ssl.certificate=$CERTS_DIR/es02/es02.crt - xpack.ssl.key=$CERTS_DIR/es02/es02.key volumes: ['esdata_02:/usr/share/elasticsearch/data', './certs:$CERTS_DIR'] + wait_until_ready: - image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:{version} command: /usr/bin/true depends_on: {"es01": {"condition": "service_healthy"}} + volumes: {"esdata_01": {"driver": "local"}, "esdata_02": {"driver": "local"}} ---- -<1> Bootstrap `elastic` with the password defined in `.env`. See +<1> Bootstrap `elastic` with the password defined in `.env`. See {stack-ov}/built-in-users.html#bootstrap-elastic-passwords[the Elastic Bootstrap Password]. -<2> Disable verification of authenticity for inter-node communication. Allows +<2> Automatically generate and apply a trial subscription, in order to enable +{security}. +<3> Disable verification of authenticity for inter-node communication. Allows creating self-signed certificates without having to pin specific internal IP addresses. endif::[] From 15f95a9f934d7ff4a4c3ab9e70a8877cf046e9ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 18 Jul 2018 09:12:28 +0200 Subject: [PATCH 077/260] Fix `range` queries on `_type` field for singe type indices (#31756) With the introduction of single types in 6.x, the `_type` field is no longer indexed, which leads to certain queries that were working before throw errors now. One such query is the `range` query, that, if performed on a single typer index, currently throws an IAE since the field is not indexed. This change adds special treatment for this case in the TypeFieldMapper, comparing the range queries lower and upper bound to the one existing type and either returns a MatchAllDocs or a MatchNoDocs query. 
Relates to #31632 Closes #31476 --- .../index/mapper/TypeFieldMapper.java | 27 ++++++++++++++ .../search/query/SearchQueryIT.java | 36 +++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index ffb548fd0f1..71bd2e93d30 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -35,6 +35,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -90,6 +92,8 @@ public class TypeFieldMapper extends MetadataFieldMapper { static final class TypeFieldType extends StringFieldType { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(ESLoggerFactory.getLogger(TypeFieldType.class)); + TypeFieldType() { } @@ -154,6 +158,29 @@ public class TypeFieldMapper extends MetadataFieldMapper { } } + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("range_single_type", + "Running [range] query on [_type] field for an index with a single type. 
As types are deprecated, this functionality will be removed in future releases."); + Query result = new MatchAllDocsQuery(); + String type = context.getMapperService().documentMapper().type(); + if (type != null) { + BytesRef typeBytes = new BytesRef(type); + if (lowerTerm != null) { + int comp = indexedValueForSearch(lowerTerm).compareTo(typeBytes); + if (comp > 0 || (comp == 0 && includeLower == false)) { + result = new MatchNoDocsQuery("[_type] was lexicographically smaller than lower bound of range"); + } + } + if (upperTerm != null) { + int comp = indexedValueForSearch(upperTerm).compareTo(typeBytes); + if (comp < 0 || (comp == 0 && includeUpper == false)) { + result = new MatchNoDocsQuery("[_type] was lexicographically greater than upper bound of range"); + } + } + } + return result; + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index be71867edd2..1694f86c53e 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1822,4 +1822,40 @@ public class SearchQueryIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); assertHitCount(searchResponse, 1); } + + public void testRangeQueryTypeField_31476() throws Exception { + assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword")); + + client().prepareIndex("test", "foo", "1").setSource("field", "value").get(); + refresh(); + + RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra"); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + + range = new RangeQueryBuilder("_type").from("monkey").to("zebra"); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + 
range = new RangeQueryBuilder("_type").from("ape").to("donkey"); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(false); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(true); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + + range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(false); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(true); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + } + } From 69cbdc47eb97544dfde51a042860fca110c6a20c Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Wed, 18 Jul 2018 08:23:59 +0000 Subject: [PATCH 078/260] Fix CP for namingConventions when gradle home has spaces (#31914) * Fix CP for namingConventions when gradle home has spaces Closes #31736. Probably not Windows specific, just not common to have spaces on Linux. 
--- .../precommit/NamingConventionsTask.java | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java index cfbb75456bc..297586e9ac6 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java @@ -16,6 +16,8 @@ import org.gradle.api.tasks.SourceSetContainer; import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; import java.util.Objects; /** @@ -30,16 +32,25 @@ public class NamingConventionsTask extends LoggedExec { final Project project = getProject(); SourceSetContainer sourceSets = getJavaSourceSets(); - final FileCollection classpath = project.files( - // This works because the class only depends on one class from junit that will be available from the - // tests compile classpath. It's the most straight forward way of telling Java where to find the main - // class. - NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation().getPath(), - // the tests to be loaded - checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(), - sourceSets.getByName("test").getCompileClasspath(), - sourceSets.getByName("test").getOutput() - ); + final FileCollection classpath; + try { + URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation(); + if (location.getProtocol().equals("file") == false) { + throw new GradleException("Unexpected location for NamingConventionCheck class: "+ location); + } + classpath = project.files( + // This works because the class only depends on one class from junit that will be available from the + // tests compile classpath. 
It's the most straight forward way of telling Java where to find the main + // class. + location.toURI().getPath(), + // the tests to be loaded + checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(), + sourceSets.getByName("test").getCompileClasspath(), + sourceSets.getByName("test").getOutput() + ); + } catch (URISyntaxException e) { + throw new AssertionError(e); + } dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName()))); getInputs().files(classpath); @@ -111,10 +122,6 @@ public class NamingConventionsTask extends LoggedExec { this.successMarker = successMarker; } - public boolean getSkipIntegTestInDisguise() { - return skipIntegTestInDisguise; - } - public boolean isSkipIntegTestInDisguise() { return skipIntegTestInDisguise; } From ef5e8d8d8a82afcc9d9f49a4c8a868d559ffef93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 18 Jul 2018 10:34:42 +0200 Subject: [PATCH 079/260] Fix Java 11 javadoc compile problem Java 11 complains with a "type arguments not allowed here" error when types are used in javadoc links it seems. Simply removing it. --- .../java/org/elasticsearch/painless/PainlessDocGenerator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 4486a52ccb1..cc596dcc395 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -328,7 +328,7 @@ public class PainlessDocGenerator { } /** - * Pick the javadoc root for a {@link Class}. + * Pick the javadoc root for a {@link Class}. 
*/ private static String javadocRoot(Class clazz) { String classPackage = clazz.getPackage().getName(); From 5856c396ddd9e36094c5471d436be100bb586e60 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 18 Jul 2018 11:30:44 +0200 Subject: [PATCH 080/260] A replica can be promoted and started in one cluster state update (#32042) When a replica is fully recovered (i.e., in `POST_RECOVERY` state) we send a request to the master to start the shard. The master changes the state of the replica and publishes a cluster state to that effect. In certain cases, that cluster state can be processed on the node hosting the replica *together* with a cluster state that promotes that, now started, replica to a primary. This can happen due to cluster state batched processing or if the master died after having committed the cluster state that starts the shard but before publishing it to the node with the replica. If the master also held the primary shard, the new master node will remove the primary (as it failed) and will also immediately promote the replica (thinking it is started). Sadly our code in IndexShard didn't allow for this which caused [assertions](https://github.com/elastic/elasticsearch/blob/13917162ad5c59a96ccb4d6a81a5044546c45c22/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java#L482) to be tripped in some of our tests runs. 
--- .../elasticsearch/index/shard/IndexShard.java | 19 ++-- .../IndexLevelReplicationTests.java | 2 +- .../index/shard/IndexShardTests.java | 67 +++++--------- ...actIndicesClusterStateServiceTestCase.java | 8 ++ .../PeerRecoveryTargetServiceTests.java | 2 +- .../indices/recovery/RecoveryTests.java | 2 +- .../ESIndexLevelReplicationTestCase.java | 9 +- .../index/shard/IndexShardTestCase.java | 90 +++++++++++++++---- 8 files changed, 126 insertions(+), 73 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index b07e22875e8..fc08438a7d9 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -413,10 +413,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl if (state == IndexShardState.POST_RECOVERY && newRouting.active()) { assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; - - if (newRouting.primary() && currentRouting.isRelocationTarget() == false) { - replicationTracker.activatePrimaryMode(getLocalCheckpoint()); - } + assert currentRouting.isRelocationTarget() == false || currentRouting.primary() == false || + replicationTracker.isPrimaryMode() : + "a primary relocation is completed by the master, but primary mode is not active " + currentRouting; changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); } else if (currentRouting.primary() && currentRouting.relocating() && replicationTracker.isPrimaryMode() == false && @@ -432,7 +431,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl final CountDownLatch shardStateUpdated = new CountDownLatch(1); if (newRouting.primary()) { - if (newPrimaryTerm != primaryTerm) { + if (newPrimaryTerm == primaryTerm) { + if (currentRouting.initializing() && 
currentRouting.isRelocationTarget() == false && newRouting.active()) { + // the master started a recovering primary, activate primary mode. + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); + } + } else { assert currentRouting.primary() == false : "term is only increased as part of primary promotion"; /* Note that due to cluster state batching an initializing primary shard term can failed and re-assigned * in one state causing it's term to be incremented. Note that if both current shard state and new @@ -521,6 +525,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } // set this last, once we finished updating all internal state. this.shardRouting = newRouting; + + assert this.shardRouting.primary() == false || + this.shardRouting.started() == false || // note that we use started and not active to avoid relocating shards + this.replicationTracker.isPrimaryMode() + : "an started primary must be in primary mode " + this.shardRouting; shardStateUpdated.countDown(); } if (currentRouting != null && currentRouting.active() == false && newRouting.active()) { diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 018548be962..b05b1e5cc5c 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -363,7 +363,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase logger.info("--> Promote replica2 as the primary"); shards.promoteReplicaToPrimary(replica2); logger.info("--> Recover replica3 from replica2"); - recoverReplica(replica3, replica2); + recoverReplica(replica3, replica2, true); try (Translog.Snapshot snapshot = getTranslog(replica3).newSnapshot()) { assertThat(snapshot.totalOperations(), equalTo(initDocs + 
1)); assertThat(snapshot.next(), equalTo(op2)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 2e07ec950a5..15e6151457f 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -226,6 +226,7 @@ public class IndexShardTests extends IndexShardTestCase { } public void testFailShard() throws Exception { + allowShardFailures(); IndexShard shard = newStartedShard(); final ShardPath shardPath = shard.shardPath(); assertNotNull(shardPath); @@ -309,7 +310,8 @@ public class IndexShardTests extends IndexShardTestCase { } public void testPrimaryPromotionDelaysOperations() throws IOException, BrokenBarrierException, InterruptedException { - final IndexShard indexShard = newStartedShard(false); + final IndexShard indexShard = newShard(false); + recoveryEmptyReplica(indexShard, randomBoolean()); final int operations = scaledRandomIntBetween(1, 64); final CyclicBarrier barrier = new CyclicBarrier(1 + operations); @@ -353,20 +355,10 @@ public class IndexShardTests extends IndexShardTestCase { barrier.await(); latch.await(); - // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); - final ShardRouting primaryRouting = - newShardRouting( - replicaRouting.shardId(), - replicaRouting.currentNodeId(), - null, - true, - ShardRoutingState.STARTED, - replicaRouting.allocationId()); - indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1, (shard, listener) -> {}, - 0L, Collections.singleton(primaryRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), - Collections.emptySet()); + promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), + new 
IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); + final int delayedOperations = scaledRandomIntBetween(1, 64); final CyclicBarrier delayedOperationsBarrier = new CyclicBarrier(1 + delayedOperations); @@ -428,8 +420,9 @@ public class IndexShardTests extends IndexShardTestCase { * 1) Internal state (ala ReplicationTracker) have been updated * 2) Primary term is set to the new term */ - public void testPublishingOrderOnPromotion() throws IOException, BrokenBarrierException, InterruptedException { - final IndexShard indexShard = newStartedShard(false); + public void testPublishingOrderOnPromotion() throws IOException, InterruptedException, BrokenBarrierException { + final IndexShard indexShard = newShard(false); + recoveryEmptyReplica(indexShard, randomBoolean()); final long promotedTerm = indexShard.getPrimaryTerm() + 1; final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicBoolean stop = new AtomicBoolean(); @@ -448,18 +441,10 @@ public class IndexShardTests extends IndexShardTestCase { }); thread.start(); - final ShardRouting replicaRouting = indexShard.routingEntry(); - final ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true, - ShardRoutingState.STARTED, replicaRouting.allocationId()); - - - final Set inSyncAllocationIds = Collections.singleton(primaryRouting.allocationId().getId()); - final IndexShardRoutingTable routingTable = - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(); barrier.await(); - // promote the replica - indexShard.updateShardState(primaryRouting, promotedTerm, (shard, listener) -> {}, 0L, inSyncAllocationIds, routingTable, - Collections.emptySet()); + final ShardRouting replicaRouting = indexShard.routingEntry(); + promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), + new 
IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); stop.set(true); thread.join(); @@ -468,7 +453,8 @@ public class IndexShardTests extends IndexShardTestCase { public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception { - final IndexShard indexShard = newStartedShard(false); + final IndexShard indexShard = newShard(false); + recoveryEmptyReplica(indexShard, randomBoolean()); // most of the time this is large enough that most of the time there will be at least one gap final int operations = 1024 - scaledRandomIntBetween(0, 1024); @@ -479,17 +465,8 @@ public class IndexShardTests extends IndexShardTestCase { // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); - final ShardRouting primaryRouting = - newShardRouting( - replicaRouting.shardId(), - replicaRouting.currentNodeId(), - null, - true, - ShardRoutingState.STARTED, - replicaRouting.allocationId()); - indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1, (shard, listener) -> {}, - 0L, Collections.singleton(primaryRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); + promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), + new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the @@ -506,7 +483,7 @@ public class IndexShardTests extends IndexShardTestCase { @Override public void onFailure(Exception e) { - throw new RuntimeException(e); + throw new AssertionError(e); } }, ThreadPool.Names.GENERIC, ""); @@ -846,7 +823,7 @@ public class IndexShardTests extends IndexShardTestCase { // add a replica recoverShardFromStore(primaryShard); final IndexShard replicaShard = newShard(shardId, false); - 
recoverReplica(replicaShard, primaryShard); + recoverReplica(replicaShard, primaryShard, true); final int maxSeqNo = randomIntBetween(0, 128); for (int i = 0; i <= maxSeqNo; i++) { EngineTestCase.generateNewSeqNo(primaryShard.getEngine()); @@ -1625,7 +1602,7 @@ public class IndexShardTests extends IndexShardTestCase { IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); final IndexShard primaryTarget = newShard(primarySource.routingEntry().getTargetRelocatingShard()); updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetaData()); - recoverReplica(primaryTarget, primarySource); + recoverReplica(primaryTarget, primarySource, true); // check that local checkpoint of new primary is properly tracked after primary relocation assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L)); @@ -2082,7 +2059,7 @@ public class IndexShardTests extends IndexShardTestCase { assertFalse(replica.isSyncNeeded()); return localCheckpoint; } - }, true); + }, true, true); closeShards(primary, replica); } @@ -2189,7 +2166,7 @@ public class IndexShardTests extends IndexShardTestCase { assertTrue(replica.isActive()); return localCheckpoint; } - }, false); + }, false, true); closeShards(primary, replica); } @@ -2241,7 +2218,7 @@ public class IndexShardTests extends IndexShardTestCase { super.finalizeRecovery(globalCheckpoint); assertListenerCalled.accept(replica); } - }, false); + }, false, true); closeShards(primary, replica); } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index 35bbc497838..5c6b000f7e5 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ 
b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -357,6 +357,14 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC assertTrue("and active shard must stay active, current: " + this.shardRouting + ", got: " + shardRouting, shardRouting.active()); } + if (this.shardRouting.primary()) { + assertTrue("a primary shard can't be demoted", shardRouting.primary()); + } else if (shardRouting.primary()) { + // note: it's ok for a replica in post recovery to be started and promoted at once + // this can happen when the primary failed after we sent the start shard message + assertTrue("a replica can only be promoted when active. current: " + this.shardRouting + " new: " + shardRouting, + shardRouting.active()); + } this.shardRouting = shardRouting; if (shardRouting.primary()) { term = newPrimaryTerm; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java index 3b50fa64915..4b1419375e6 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -43,7 +43,7 @@ public class PeerRecoveryTargetServiceTests extends IndexShardTestCase { try { // Empty store { - recoveryEmptyReplica(replica); + recoveryEmptyReplica(replica, true); final RecoveryTarget recoveryTarget = new RecoveryTarget(replica, null, null, null); assertThat(PeerRecoveryTargetService.getStartingSeqNo(logger, recoveryTarget), equalTo(0L)); recoveryTarget.decRef(); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index e7606328c76..aaba17c3151 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ 
b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -261,7 +261,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { } IndexShard replicaShard = newShard(primaryShard.shardId(), false); updateMappings(replicaShard, primaryShard.indexSettings().getIndexMetaData()); - recoverReplica(replicaShard, primaryShard); + recoverReplica(replicaShard, primaryShard, true); List commits = DirectoryReader.listCommits(replicaShard.store().directory()); long maxSeqNo = Long.parseLong(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO)); assertThat(maxSeqNo, lessThanOrEqualTo(globalCheckpoint)); diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 9de88216822..5a5ee12065c 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -265,7 +265,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase RecoverySource.PeerRecoverySource.INSTANCE); final IndexShard newReplica = - newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); + newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); replicas.add(newReplica); updateAllocationIDsOnPrimary(); return newReplica; @@ -341,8 +341,11 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase IndexShard replica, BiFunction targetSupplier, boolean markAsRecovering) throws IOException { - ESIndexLevelReplicationTestCase.this.recoverReplica(replica, primary, targetSupplier, markAsRecovering, activeIds(), - routingTable(Function.identity())); + final IndexShardRoutingTable 
routingTable = routingTable(Function.identity()); + final Set inSyncIds = activeIds(); + ESIndexLevelReplicationTestCase.this.recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds, + routingTable); + ESIndexLevelReplicationTestCase.this.startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable); } public synchronized DiscoveryNode getPrimaryNode() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index e4849be20e1..0cbc6e44502 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -92,8 +92,10 @@ import java.util.EnumSet; import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; +import java.util.function.Consumer; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.contains; @@ -108,6 +110,14 @@ public abstract class IndexShardTestCase extends ESTestCase { public static final IndexEventListener EMPTY_EVENT_LISTENER = new IndexEventListener() {}; + private static final AtomicBoolean failOnShardFailures = new AtomicBoolean(true); + + private static final Consumer DEFAULT_SHARD_FAILURE_HANDLER = failure -> { + if (failOnShardFailures.get()) { + throw new AssertionError(failure.reason, failure.cause); + } + }; + protected static final PeerRecoveryTargetService.RecoveryListener recoveryListener = new PeerRecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -128,6 +138,7 @@ public abstract class IndexShardTestCase extends ESTestCase { super.setUp(); threadPool = new 
TestThreadPool(getClass().getName(), threadPoolSettings()); primaryTerm = randomIntBetween(1, 100); // use random but fixed term for creating shards + failOnShardFailures.set(true); } @Override @@ -139,6 +150,15 @@ public abstract class IndexShardTestCase extends ESTestCase { } } + /** + * by default, tests will fail if any shard created by this class fails. Tests that cause failures by design + * can call this method to ignore those failures + * + */ + protected void allowShardFailures() { + failOnShardFailures.set(false); + } + public Settings threadPoolSettings() { return Settings.EMPTY; } @@ -270,7 +290,7 @@ public abstract class IndexShardTestCase extends ESTestCase { /** * creates a new initializing shard. - * @param routing shard routing to use + * @param routing shard routing to use * @param shardPath path to use for shard data * @param indexMetaData indexMetaData for the shard, including any mapping * @param indexSearcherWrapper an optional wrapper to be used during searchers @@ -302,6 +322,7 @@ public abstract class IndexShardTestCase extends ESTestCase { engineFactory, indexEventListener, indexSearcherWrapper, threadPool, BigArrays.NON_RECYCLING_INSTANCE, warmer, Collections.emptyList(), Arrays.asList(listeners), globalCheckpointSyncer, breakerService); + indexShard.addShardFailureCallback(DEFAULT_SHARD_FAILURE_HANDLER); success = true; } finally { if (success == false) { @@ -358,7 +379,7 @@ public abstract class IndexShardTestCase extends ESTestCase { if (primary) { recoverShardFromStore(shard); } else { - recoveryEmptyReplica(shard); + recoveryEmptyReplica(shard, true); } return shard; } @@ -399,11 +420,11 @@ public abstract class IndexShardTestCase extends ESTestCase { inSyncIds, newRoutingTable, Collections.emptySet()); } - protected void recoveryEmptyReplica(IndexShard replica) throws IOException { + protected void recoveryEmptyReplica(IndexShard replica, boolean startReplica) throws IOException { IndexShard primary = null; try { primary = 
newStartedShard(true); - recoverReplica(replica, primary); + recoverReplica(replica, primary, startReplica); } finally { closeShards(primary); } @@ -415,42 +436,48 @@ public abstract class IndexShardTestCase extends ESTestCase { } /** recovers a replica from the given primary **/ - protected void recoverReplica(IndexShard replica, IndexShard primary) throws IOException { + protected void recoverReplica(IndexShard replica, IndexShard primary, boolean startReplica) throws IOException { recoverReplica(replica, primary, (r, sourceNode) -> new RecoveryTarget(r, sourceNode, recoveryListener, version -> { }), - true); + true, true); } /** recovers a replica from the given primary **/ protected void recoverReplica(final IndexShard replica, final IndexShard primary, final BiFunction targetSupplier, - final boolean markAsRecovering) throws IOException { + final boolean markAsRecovering, final boolean markAsStarted) throws IOException { IndexShardRoutingTable.Builder newRoutingTable = new IndexShardRoutingTable.Builder(replica.shardId()); newRoutingTable.addShard(primary.routingEntry()); if (replica.routingEntry().isRelocationTarget() == false) { newRoutingTable.addShard(replica.routingEntry()); } - recoverReplica(replica, primary, targetSupplier, markAsRecovering, - Collections.singleton(primary.routingEntry().allocationId().getId()), - newRoutingTable.build()); + final Set inSyncIds = Collections.singleton(primary.routingEntry().allocationId().getId()); + final IndexShardRoutingTable routingTable = newRoutingTable.build(); + recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds, routingTable); + if (markAsStarted) { + startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable); + } } /** * Recovers a replica from the give primary, allow the user to supply a custom recovery target. A typical usage of a custom recovery * target is to assert things in the various stages of recovery. 
+ * + * Note: this method keeps the shard in {@link IndexShardState#POST_RECOVERY} and doesn't start it. + * * @param replica the recovery target shard * @param primary the recovery source shard * @param targetSupplier supplies an instance of {@link RecoveryTarget} * @param markAsRecovering set to {@code false} if the replica is marked as recovering */ - protected final void recoverReplica(final IndexShard replica, - final IndexShard primary, - final BiFunction targetSupplier, - final boolean markAsRecovering, - final Set inSyncIds, - final IndexShardRoutingTable routingTable) throws IOException { + protected final void recoverUnstartedReplica(final IndexShard replica, + final IndexShard primary, + final BiFunction targetSupplier, + final boolean markAsRecovering, + final Set inSyncIds, + final IndexShardRoutingTable routingTable) throws IOException { final DiscoveryNode pNode = getFakeDiscoNode(primary.routingEntry().currentNodeId()); final DiscoveryNode rNode = getFakeDiscoNode(replica.routingEntry().currentNodeId()); if (markAsRecovering) { @@ -478,11 +505,15 @@ public abstract class IndexShardTestCase extends ESTestCase { request, (int) ByteSizeUnit.MB.toBytes(1), Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), pNode.getName()).build()); - final ShardRouting initializingReplicaRouting = replica.routingEntry(); primary.updateShardState(primary.routingEntry(), primary.getPrimaryTerm(), null, currentClusterStateVersion.incrementAndGet(), inSyncIds, routingTable, Collections.emptySet()); recovery.recoverToTarget(); recoveryTarget.markAsDone(); + } + + protected void startReplicaAfterRecovery(IndexShard replica, IndexShard primary, Set inSyncIds, + IndexShardRoutingTable routingTable) throws IOException { + ShardRouting initializingReplicaRouting = replica.routingEntry(); IndexShardRoutingTable newRoutingTable = initializingReplicaRouting.isRelocationTarget() ? 
new IndexShardRoutingTable.Builder(routingTable) @@ -502,6 +533,31 @@ public abstract class IndexShardTestCase extends ESTestCase { currentClusterStateVersion.get(), inSyncIdsWithReplica, newRoutingTable, Collections.emptySet()); } + + /** + * promotes a replica to primary, incrementing its term and starting it if needed + */ + protected void promoteReplica(IndexShard replica, Set inSyncIds, IndexShardRoutingTable routingTable) throws IOException { + assertThat(inSyncIds, contains(replica.routingEntry().allocationId().getId())); + final ShardRouting routingEntry = newShardRouting( + replica.routingEntry().shardId(), + replica.routingEntry().currentNodeId(), + null, + true, + ShardRoutingState.STARTED, + replica.routingEntry().allocationId()); + + final IndexShardRoutingTable newRoutingTable = new IndexShardRoutingTable.Builder(routingTable) + .removeShard(replica.routingEntry()) + .addShard(routingEntry) + .build(); + replica.updateShardState(routingEntry, replica.getPrimaryTerm() + 1, + (is, listener) -> + listener.onResponse(new PrimaryReplicaSyncer.ResyncTask(1, "type", "action", "desc", null, Collections.emptyMap())), + currentClusterStateVersion.incrementAndGet(), + inSyncIds, newRoutingTable, Collections.emptySet()); + } + private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException { Store.MetadataSnapshot result; try { From 8235b254abf274e99b7af91d125edc0a9dbe627a Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Wed, 18 Jul 2018 12:18:00 +0200 Subject: [PATCH 081/260] Add EC2 credential test for repository-s3 (#31918) Add EC2 credential test for repository-s3 Relates to #26913 --- .../gradle/test/ClusterConfiguration.groovy | 7 +- .../gradle/test/ClusterFormationTasks.groovy | 29 ++- .../elasticsearch/gradle/test/NodeInfo.groovy | 10 +- plugins/repository-s3/build.gradle | 50 +++- .../repositories/s3/AmazonS3Fixture.java | 226 ++++++++++++---- .../40_repository_ec2_credentials.yml | 243 ++++++++++++++++++ 6 files
changed, 485 insertions(+), 80 deletions(-) create mode 100644 plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 5c363ac043a..d6477e05b15 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -137,7 +137,10 @@ class ClusterConfiguration { this.project = project } - Map systemProperties = new HashMap<>() + // **Note** for systemProperties, settings, keystoreFiles etc: + // value could be a GString that is evaluated to just a String + // there are cases when value depends on task that is not executed yet on configuration stage + Map systemProperties = new HashMap<>() Map settings = new HashMap<>() @@ -157,7 +160,7 @@ class ClusterConfiguration { List dependencies = new ArrayList<>() @Input - void systemProperty(String property, String value) { + void systemProperty(String property, Object value) { systemProperties.put(property, value) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 0349130076c..4ede349b206 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -609,7 +609,6 @@ class ClusterFormationTasks { /** Adds a task to start an elasticsearch node with the given configuration */ static Task configureStartTask(String name, Project project, Task setup, NodeInfo node) { - // this closure is converted into ant nodes by groovy's AntBuilder Closure antRunner = { AntBuilder ant -> ant.exec(executable: node.executable, spawn: 
node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { @@ -630,13 +629,6 @@ class ClusterFormationTasks { node.writeWrapperScript() } - // we must add debug options inside the closure so the config is read at execution time, as - // gradle task options are not processed until the end of the configuration phase - if (node.config.debug) { - println 'Running elasticsearch in debug mode, suspending until connected on port 8000' - node.env['ES_JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' - } - node.getCommandString().eachLine { line -> logger.info(line) } if (logger.isInfoEnabled() || node.config.daemonize == false) { @@ -654,6 +646,27 @@ class ClusterFormationTasks { } start.doLast(elasticsearchRunner) start.doFirst { + // Configure ES JAVA OPTS - adds system properties, assertion flags, remote debug etc + List esJavaOpts = [node.env.get('ES_JAVA_OPTS', '')] + String collectedSystemProperties = node.config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") + esJavaOpts.add(collectedSystemProperties) + esJavaOpts.add(node.config.jvmArgs) + if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) { + // put the enable assertions options before other options to allow + // flexibility to disable assertions for specific packages or classes + // in the cluster-specific options + esJavaOpts.add("-ea") + esJavaOpts.add("-esa") + } + // we must add debug options inside the closure so the config is read at execution time, as + // gradle task options are not processed until the end of the configuration phase + if (node.config.debug) { + println 'Running elasticsearch in debug mode, suspending until connected on port 8000' + esJavaOpts.add('-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000') + } + node.env['ES_JAVA_OPTS'] = esJavaOpts.join(" ") + + // project.logger.info("Starting node in ${node.clusterName} distribution: ${node.config.distribution}") } return start diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 5e67dfa55cf..7844ea77fc1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -180,15 +180,7 @@ class NodeInfo { } args.addAll("-E", "node.portsfile=true") - String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") - String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs - if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) { - // put the enable assertions options before other options to allow - // flexibility to disable assertions for specific packages or classes - // in the cluster-specific options - esJavaOpts = String.join(" ", "-ea", "-esa", esJavaOpts) - } - env = ['ES_JAVA_OPTS': esJavaOpts] + env = [:] for (Map.Entry property : System.properties.entrySet()) { if (property.key.startsWith('tests.es.')) { args.add("-E") diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 5af0a412b4c..225d523817e 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -89,18 +89,26 @@ String s3TemporarySessionToken = System.getenv("amazon_s3_session_token_temporar String s3TemporaryBucket = System.getenv("amazon_s3_bucket_temporary") String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") +String s3EC2Bucket = System.getenv("amazon_s3_bucket_ec2") +String s3EC2BasePath = System.getenv("amazon_s3_base_path_ec2") + // If all these variables are missing then we are testing against the internal fixture instead, which has the following // credentials hard-coded in. 
-if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) { +if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath + && !s3EC2Bucket && !s3EC2BasePath) { s3PermanentAccessKey = 's3_integration_test_permanent_access_key' s3PermanentSecretKey = 's3_integration_test_permanent_secret_key' s3PermanentBucket = 'permanent-bucket-test' s3PermanentBasePath = 'integration_test' + s3EC2Bucket = 'ec2-bucket-test' + s3EC2BasePath = 'integration_test' + useFixture = true -} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) { +} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath + || !s3EC2Bucket || !s3EC2BasePath) { throw new IllegalArgumentException("not all options specified to run against external S3 service") } @@ -274,24 +282,52 @@ if (useFixture && minioDistribution) { integTestMinioRunner.dependsOn(startMinio) integTestMinioRunner.finalizedBy(stopMinio) // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 - integTestMinioRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*' + integTestMinioRunner.systemProperty 'tests.rest.blacklist', [ + 'repository_s3/30_repository_temporary_credentials/*', + 'repository_s3/40_repository_ec2_credentials/*' + ].join(",") project.check.dependsOn(integTestMinio) } +File parentFixtures = new File(project.buildDir, "fixtures") +File s3FixtureFile = new File(parentFixtures, 's3Fixture.properties') + +task s3FixtureProperties { + outputs.file(s3FixtureFile) + def s3FixtureOptions = [ + "tests.seed" : project.testSeed, + "s3Fixture.permanent_bucket_name" : s3PermanentBucket, + "s3Fixture.permanent_key" : s3PermanentAccessKey, + "s3Fixture.temporary_bucket_name" : s3TemporaryBucket, + "s3Fixture.temporary_key" : s3TemporaryAccessKey, + "s3Fixture.temporary_session_token": 
s3TemporarySessionToken, + "s3Fixture.ec2_bucket_name" : s3EC2Bucket + ] + + doLast { + file(s3FixtureFile).text = s3FixtureOptions.collect { k, v -> "$k = $v" }.join("\n") + } +} + /** A task to start the AmazonS3Fixture which emulates an S3 service **/ task s3Fixture(type: AntFixture) { dependsOn testClasses + dependsOn s3FixtureProperties + inputs.file(s3FixtureFile) + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') - args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3PermanentBucket, s3TemporaryBucket + args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3FixtureFile.getAbsolutePath() } Map expansions = [ 'permanent_bucket': s3PermanentBucket, 'permanent_base_path': s3PermanentBasePath, 'temporary_bucket': s3TemporaryBucket, - 'temporary_base_path': s3TemporaryBasePath + 'temporary_base_path': s3TemporaryBasePath, + 'ec2_bucket': s3EC2Bucket, + 'ec2_base_path': s3EC2BasePath ] processTestResources { @@ -319,6 +355,10 @@ integTestCluster { /* Use a closure on the string to delay evaluation until tests are executed */ setting 's3.client.integration_test_permanent.endpoint', "http://${-> s3Fixture.addressAndPort}" setting 's3.client.integration_test_temporary.endpoint', "http://${-> s3Fixture.addressAndPort}" + setting 's3.client.integration_test_ec2.endpoint', "http://${-> s3Fixture.addressAndPort}" + + // to redirect InstanceProfileCredentialsProvider to custom auth point + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", "http://${-> s3Fixture.addressAndPort}" } else { println "Using an external service to test the repository-s3 plugin" } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index 9b38669da25..ce6c4723149 100644 --- 
a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -18,6 +18,14 @@ */ package org.elasticsearch.repositories.s3; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.fixture.AbstractHttpFixture; import com.amazonaws.util.DateUtils; import org.elasticsearch.common.Strings; @@ -26,20 +34,26 @@ import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; -import org.elasticsearch.test.fixture.AbstractHttpFixture; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLength; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween; import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Objects.requireNonNull; /** * {@link AmazonS3Fixture} emulates an AWS S3 service @@ -47,63 +61,76 @@ import static java.nio.charset.StandardCharsets.UTF_8; * he implementation is based on official documentation available at 
https://docs.aws.amazon.com/AmazonS3/latest/API/. */ public class AmazonS3Fixture extends AbstractHttpFixture { + private static final String AUTH = "AUTH"; + private static final String NON_AUTH = "NON_AUTH"; + + private static final String EC2_PROFILE = "ec2Profile"; + + private final Properties properties; + private final Random random; /** List of the buckets stored on this test server **/ private final Map buckets = ConcurrentCollections.newConcurrentMap(); /** Request handlers for the requests made by the S3 client **/ private final PathTrie handlers; - private final String permanentBucketName; - private final String temporaryBucketName; /** * Creates a {@link AmazonS3Fixture} */ - private AmazonS3Fixture(final String workingDir, final String permanentBucketName, final String temporaryBucketName) { + private AmazonS3Fixture(final String workingDir, Properties properties) { super(workingDir); - this.permanentBucketName = permanentBucketName; - this.temporaryBucketName = temporaryBucketName; + this.properties = properties; + this.random = new Random(Long.parseUnsignedLong(requireNonNull(properties.getProperty("tests.seed")), 16)); - this.buckets.put(permanentBucketName, new Bucket(permanentBucketName)); - this.buckets.put(temporaryBucketName, new Bucket(temporaryBucketName)); - this.handlers = defaultHandlers(buckets); + new Bucket("s3Fixture.permanent", false); + new Bucket("s3Fixture.temporary", true); + final Bucket ec2Bucket = new Bucket("s3Fixture.ec2", + randomAsciiAlphanumOfLength(random, 10), randomAsciiAlphanumOfLength(random, 10)); + + this.handlers = defaultHandlers(buckets, ec2Bucket); + } + + private static String nonAuthPath(Request request) { + return nonAuthPath(request.getMethod(), request.getPath()); + } + + private static String nonAuthPath(String method, String path) { + return NON_AUTH + " " + method + " " + path; + } + + private static String authPath(Request request) { + return authPath(request.getMethod(), request.getPath()); + } + + 
private static String authPath(String method, String path) { + return AUTH + " " + method + " " + path; } @Override protected Response handle(final Request request) throws IOException { - final RequestHandler handler = handlers.retrieve(request.getMethod() + " " + request.getPath(), request.getParameters()); + final String nonAuthorizedPath = nonAuthPath(request); + final RequestHandler nonAuthorizedHandler = handlers.retrieve(nonAuthorizedPath, request.getParameters()); + if (nonAuthorizedHandler != null) { + return nonAuthorizedHandler.handle(request); + } + + final String authorizedPath = authPath(request); + final RequestHandler handler = handlers.retrieve(authorizedPath, request.getParameters()); if (handler != null) { - final String authorization = request.getHeader("Authorization"); - final String permittedBucket; - if (authorization.contains("s3_integration_test_permanent_access_key")) { - final String sessionToken = request.getHeader("x-amz-security-token"); - if (sessionToken != null) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Unexpected session token", ""); - } - permittedBucket = permanentBucketName; - } else if (authorization.contains("s3_integration_test_temporary_access_key")) { - final String sessionToken = request.getHeader("x-amz-security-token"); - if (sessionToken == null) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "No session token", ""); - } - if (sessionToken.equals("s3_integration_test_temporary_session_token") == false) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad session token", ""); - } - permittedBucket = temporaryBucketName; - } else { + final String bucketName = request.getParam("bucket"); + if (bucketName == null) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); } - - final String bucket = request.getParam("bucket"); - if (bucket != null && permittedBucket.equals(bucket) == false) { - // 
allow a null bucket to support the multi-object-delete API which - // passes the bucket name in the host header instead of the URL. - if (buckets.containsKey(bucket)) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad bucket", ""); - } else { - return newBucketNotFoundError(request.getId(), bucket); - } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newBucketNotFoundError(request.getId(), bucketName); } + final Response authResponse = authenticateBucket(request, bucket); + if (authResponse != null) { + return authResponse; + } + return handler.handle(request); } else { @@ -111,24 +138,49 @@ public class AmazonS3Fixture extends AbstractHttpFixture { } } - public static void main(final String[] args) throws Exception { - if (args == null || args.length != 3) { - throw new IllegalArgumentException( - "AmazonS3Fixture "); + private Response authenticateBucket(Request request, Bucket bucket) { + final String authorization = request.getHeader("Authorization"); + if (authorization == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); } + if (authorization.contains(bucket.key)) { + final String sessionToken = request.getHeader("x-amz-security-token"); + if (bucket.token == null) { + if (sessionToken != null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Unexpected session token", ""); + } + } else { + if (sessionToken == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "No session token", ""); + } + if (sessionToken.equals(bucket.token) == false) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad session token", ""); + } + } + } + return null; + } - final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], args[1], args[2]); + public static void main(final String[] args) throws Exception { + if (args == null || args.length != 2) { + throw new 
IllegalArgumentException("AmazonS3Fixture "); + } + final Properties properties = new Properties(); + try (InputStream is = Files.newInputStream(PathUtils.get(args[1]))) { + properties.load(is); + } + final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], properties); fixture.listen(); } /** Builds the default request handlers **/ - private static PathTrie defaultHandlers(final Map buckets) { + private PathTrie defaultHandlers(final Map buckets, final Bucket ec2Bucket) { final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); // HEAD Object // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html - objectsPaths("HEAD /{bucket}").forEach(path -> + objectsPaths(authPath(HttpHead.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -150,7 +202,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { // PUT Object // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html - objectsPaths("PUT /{bucket}").forEach(path -> + objectsPaths(authPath(HttpPut.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String destBucketName = request.getParam("bucket"); @@ -200,7 +252,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { // DELETE Object // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectDELETE.html - objectsPaths("DELETE /{bucket}").forEach(path -> + objectsPaths(authPath(HttpDelete.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -218,7 +270,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { // GET Object // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html - objectsPaths("GET /{bucket}").forEach(path -> + objectsPaths(authPath(HttpGet.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = 
request.getParam("bucket"); @@ -239,7 +291,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { // HEAD Bucket // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketHEAD.html - handlers.insert("HEAD /{bucket}", (request) -> { + handlers.insert(authPath(HttpHead.METHOD_NAME, "/{bucket}"), (request) -> { String bucket = request.getParam("bucket"); if (Strings.hasText(bucket) && buckets.containsKey(bucket)) { return new Response(RestStatus.OK.getStatus(), TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE); @@ -251,7 +303,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { // GET Bucket (List Objects) Version 1 // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html - handlers.insert("GET /{bucket}/", (request) -> { + handlers.insert(authPath(HttpGet.METHOD_NAME, "/{bucket}/"), (request) -> { final String bucketName = request.getParam("bucket"); final Bucket bucket = buckets.get(bucketName); @@ -269,7 +321,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { // Delete Multiple Objects // // https://docs.aws.amazon.com/AmazonS3/latest/API/multiobjectdeleteapi.html - handlers.insert("POST /", (request) -> { + handlers.insert(nonAuthPath(HttpPost.METHOD_NAME, "/"), (request) -> { final List deletes = new ArrayList<>(); final List errors = new ArrayList<>(); @@ -292,7 +344,12 @@ public class AmazonS3Fixture extends AbstractHttpFixture { boolean found = false; for (Bucket bucket : buckets.values()) { - if (bucket.objects.remove(objectName) != null) { + if (bucket.objects.containsKey(objectName)) { + final Response authResponse = authenticateBucket(request, bucket); + if (authResponse != null) { + return authResponse; + } + bucket.objects.remove(objectName); found = true; } } @@ -311,23 +368,80 @@ public class AmazonS3Fixture extends AbstractHttpFixture { return newInternalError(request.getId(), "Something is wrong with this POST multiple deletes request"); }); + // non-authorized requests + + TriFunction 
credentialResponseFunction = (profileName, key, token) -> { + final Date expiration = new Date(new Date().getTime() + TimeUnit.DAYS.toMillis(1)); + final String response = "{" + + "\"AccessKeyId\": \"" + key + "\"," + + "\"Expiration\": \"" + DateUtils.formatISO8601Date(expiration) + "\"," + + "\"RoleArn\": \"" + randomAsciiAlphanumOfLengthBetween(random, 1, 20) + "\"," + + "\"SecretAccessKey\": \"" + randomAsciiAlphanumOfLengthBetween(random, 1, 20) + "\"," + + "\"Token\": \"" + token + "\"" + + "}"; + + final Map headers = new HashMap<>(contentType("application/json")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + }; + + // GET + // + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/"), (request) -> { + final String response = EC2_PROFILE; + + final Map headers = new HashMap<>(contentType("text/plain")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + }); + + // GET + // + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/{profileName}"), (request) -> { + final String profileName = request.getParam("profileName"); + if (EC2_PROFILE.equals(profileName) == false) { + return new Response(RestStatus.NOT_FOUND.getStatus(), new HashMap<>(), "unknown credentials".getBytes(UTF_8)); + } + return credentialResponseFunction.apply(profileName, ec2Bucket.key, ec2Bucket.token); + }); + return handlers; } + private static String prop(Properties properties, String propertyName) { + return requireNonNull(properties.getProperty(propertyName), + "property '" + propertyName + "' is missing"); + } + /** * Represents a S3 bucket. 
*/ - static class Bucket { + class Bucket { /** Bucket name **/ final String name; + final String key; + + final String token; + /** Blobs contained in the bucket **/ final Map objects; - Bucket(final String name) { - this.name = Objects.requireNonNull(name); + private Bucket(final String prefix, final boolean tokenRequired) { + this(prefix, prop(properties, prefix + "_key"), + tokenRequired ? prop(properties, prefix + "_session_token") : null); + } + + private Bucket(final String prefix, final String key, final String token) { + this.name = prop(properties, prefix + "_bucket_name"); + this.key = key; + this.token = token; + this.objects = ConcurrentCollections.newConcurrentMap(); + if (buckets.put(name, this) != null) { + throw new IllegalArgumentException("bucket " + name + " is already registered"); + } } } diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml new file mode 100644 index 00000000000..2df3b8290a1 --- /dev/null +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml @@ -0,0 +1,243 @@ +# Integration tests for repository-s3 + +--- +setup: + + # Register repository with ec2 credentials + - do: + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: ${ec2_bucket} + client: integration_test_ec2 + base_path: ${ec2_base_path} + canned_acl: private + storage_class: standard + +--- +"Snapshot and Restore with repository-s3 using ec2 credentials": + + # Get repository + - do: + snapshot.get_repository: + repository: repository_ec2 + + - match: { repository_ec2.settings.bucket : ${ec2_bucket} } + - match: { repository_ec2.settings.client : "integration_test_ec2" } + - match: { repository_ec2.settings.base_path : ${ec2_base_path} } + - match: { repository_ec2.settings.canned_acl : 
"private" } + - match: { repository_ec2.settings.storage_class : "standard" } + - is_false: repository_ec2.settings.access_key + - is_false: repository_ec2.settings.secret_key + - is_false: repository_ec2.settings.session_token + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository_ec2 + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository_ec2 + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository_ec2 + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository_ec2 + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + indices.delete: + index: docs + + # 
Restore the second snapshot + - do: + snapshot.restore: + repository: repository_ec2 + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository_ec2 + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository_ec2 + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository_ec2 + snapshot: snapshot-one + +--- +"Register a repository with a non existing bucket": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test_temporary + +--- +"Register a repository with a non existing client": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: repository_ec2 + client: unknown + +--- +"Get a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.get: + repository: repository_ec2 + snapshot: missing + +--- +"Delete a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.delete: + repository: repository_ec2 + snapshot: missing + +--- +"Restore a non existing snapshot": + + - do: + catch: /snapshot_restore_exception/ + snapshot.restore: + repository: repository_ec2 + snapshot: missing + wait_for_completion: true + +--- +teardown: + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository_ec2 From 1924f5d07c9f739821bc8c749a5314a074dab36b Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 18 Jul 2018 12:42:07 +0200 Subject: [PATCH 082/260] Add more contexts to painless execute api 
(#30511) This change adds two contexts to execute scripts against: * SEARCH_SCRIPT: Allows to run scripts in a search script context. This context is used in `function_score` query's script function, script fields, script sorting and `terms_set` query. * FILTER_SCRIPT: Allows to run scripts in a filter script context. This context is used in the `script` query. In both contexts an index name needs to be specified and a sample document. The document is needed to create an in-memory index that the script can access via the `doc[...]` and other notations. The index name is needed because a mapping is needed to index the document. Examples: ``` POST /_scripts/painless/_execute { "script": { "source": "doc['field'].value.length()" }, "context" : { "search_script": { "document": { "field": "four" }, "index": "my-index" } } } ``` Returns: ``` { "result": 4 } ``` ``` POST /_scripts/painless/_execute { "script": { "source": "doc['field'].value.length() <= params.max_length", "params": { "max_length": 4 } }, "context" : { "filter_script": { "document": { "field": "four" }, "index": "my-index" } } } ``` Returns: ``` { "result": true } ``` Also changed PainlessExecuteAction.TransportAction to use TransportSingleShardAction instead of HandledAction, because now in case score or filter contexts are used the request needs to be redirected to a node that has an active IndexService for the index being referenced (a node with a shard copy for that index). 
--- .../painless/painless-execute-script.asciidoc | 133 +++++- .../painless/PainlessExecuteAction.java | 439 +++++++++++++++--- .../painless/PainlessExecuteApiTests.java | 113 +++++ .../painless/PainlessExecuteRequestTests.java | 44 +- .../painless/70_execute_painless_scripts.yml | 52 ++- 5 files changed, 694 insertions(+), 87 deletions(-) create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index a3ac5b578d7..2aca9597786 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -9,23 +9,24 @@ The Painless execute API allows an arbitrary script to be executed and a result .Parameters [options="header"] |====== -| Name | Required | Default | Description -| `script` | yes | - | The script to execute -| `context` | no | `painless_test` | The context the script should be executed in. +| Name | Required | Default | Description +| `script` | yes | - | The script to execute +| `context` | no | `painless_test` | The context the script should be executed in. +| `context_setup` | no | - | Additional parameters to the context. |====== ==== Contexts Contexts control how scripts are executed, what variables are available at runtime and what the return type is. -===== Painless test script context +===== Painless test context The `painless_test` context executes scripts as is and do not add any special parameters. The only variable that is available is `params`, which can be used to access user defined values. The result of the script is always converted to a string. If no context is specified then this context is used by default. 
-==== Example +====== Example Request: @@ -52,4 +53,124 @@ Response: "result": "0.1" } -------------------------------------------------- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE + +===== Filter context + +The `filter` context executes scripts as if they were executed inside a `script` query. +For testing purposes a document must be provided that will be indexed temporarily in-memory and +is accessible to the script being tested. Because of this the _source, stored fields and doc values +are available in the script being tested. + +The following parameters may be specified in `context_setup` for a filter context: + +document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. +index:: The name of an index containing a mapping that is compatible with the document being indexed. + +====== Example + +[source,js] +---------------------------------------------------------------- +PUT /my-index +{ + "mappings": { + "_doc": { + "properties": { + "field": { + "type": "keyword" + } + } + } + } +} + +POST /_scripts/painless/_execute +{ + "script": { + "source": "doc['field'].value.length() <= params.max_length", + "params": { + "max_length": 4 + } + }, + "context": "filter", + "context_setup": { + "index": "my-index", + "document": { + "field": "four" + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": true +} +-------------------------------------------------- +// TESTRESPONSE + + +===== Score context + +The `score` context executes scripts as if they were executed inside a `script_score` function in +`function_score` query. + +The following parameters may be specified in `context_setup` for a score context: + +document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. 
+index:: The name of an index containing a mapping that is compatible with the document being indexed. +query:: If `_score` is used in the script then a query can be specified that will be used to compute a score. + +====== Example + +[source,js] +---------------------------------------------------------------- +PUT /my-index +{ + "mappings": { + "_doc": { + "properties": { + "field": { + "type": "keyword" + }, + "rank": { + "type": "long" + } + } + } + } +} + + +POST /_scripts/painless/_execute +{ + "script": { + "source": "doc['rank'].value / params.max_rank", + "params": { + "max_rank": 5.0 + } + }, + "context": "score", + "context_setup": { + "index": "my-index", + "document": { + "rank": 4 + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": 0.8 +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 01139f6cf2e..229c919a2e6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -18,41 +18,75 @@ */ package org.elasticsearch.painless; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.store.RAMDirectory; +import org.elasticsearch.Version; import 
org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import 
org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.Locale; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -75,40 +109,181 @@ public class PainlessExecuteAction extends Action implements ToXContent { private static final ParseField SCRIPT_FIELD = new ParseField("script"); private static final ParseField CONTEXT_FIELD = new ParseField("context"); + private static final ParseField CONTEXT_SETUP_FIELD = new ParseField("context_setup"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "painless_execute_request", args -> new Request((Script) args[0], (SupportedContext) args[1])); + "painless_execute_request", args -> new Request((Script) args[0], (String) args[1], (ContextSetup) args[2])); static { 
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> Script.parse(p), SCRIPT_FIELD); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { - // For now only accept an empty json object: - XContentParser.Token token = p.nextToken(); - assert token == XContentParser.Token.FIELD_NAME; - String contextType = p.currentName(); - token = p.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - return SupportedContext.valueOf(contextType.toUpperCase(Locale.ROOT)); - }, CONTEXT_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CONTEXT_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ContextSetup::parse, CONTEXT_SETUP_FIELD); + } + + static final Map> SUPPORTED_CONTEXTS; + + static { + Map> supportedContexts = new HashMap<>(); + supportedContexts.put("painless_test", PainlessTestScript.CONTEXT); + supportedContexts.put("filter", FilterScript.CONTEXT); + supportedContexts.put("score", ScoreScript.CONTEXT); + SUPPORTED_CONTEXTS = Collections.unmodifiableMap(supportedContexts); + } + + static ScriptContext fromScriptContextName(String name) { + ScriptContext scriptContext = SUPPORTED_CONTEXTS.get(name); + if (scriptContext == null) { + throw new UnsupportedOperationException("unsupported script context name [" + name + "]"); + } + return scriptContext; + } + + static class ContextSetup implements Writeable, ToXContentObject { + + private static final ParseField INDEX_FIELD = new ParseField("index"); + private static final ParseField DOCUMENT_FIELD = new ParseField("document"); + private static final ParseField QUERY_FIELD = new ParseField("query"); + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("execute_script_context", + args -> new ContextSetup((String) 
args[0], (BytesReference) args[1], (QueryBuilder) args[2])); + + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + try (XContentBuilder b = XContentBuilder.builder(p.contentType().xContent())) { + b.copyCurrentStructure(p); + return BytesReference.bytes(b); + } + }, DOCUMENT_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> + AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY_FIELD); + } + + private final String index; + private final BytesReference document; + private final QueryBuilder query; + + private XContentType xContentType; + + static ContextSetup parse(XContentParser parser, Void context) throws IOException { + ContextSetup contextSetup = PARSER.parse(parser, null); + contextSetup.setXContentType(parser.contentType()); + return contextSetup; + } + + ContextSetup(String index, BytesReference document, QueryBuilder query) { + this.index = index; + this.document = document; + this.query = query; + } + + ContextSetup(StreamInput in) throws IOException { + index = in.readOptionalString(); + document = in.readOptionalBytesReference(); + String xContentType = in.readOptionalString(); + if (xContentType != null) { + this.xContentType = XContentType.fromMediaType(xContentType); + } + query = in.readOptionalNamedWriteable(QueryBuilder.class); + } + + public String getIndex() { + return index; + } + + public BytesReference getDocument() { + return document; + } + + public QueryBuilder getQuery() { + return query; + } + + public XContentType getXContentType() { + return xContentType; + } + + public void setXContentType(XContentType xContentType) { + this.xContentType = xContentType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContextSetup that = (ContextSetup) o; + return 
Objects.equals(index, that.index) && + Objects.equals(document, that.document) && + Objects.equals(query, that.query); + } + + @Override + public int hashCode() { + return Objects.hash(index, document, query); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(index); + out.writeOptionalBytesReference(document); + out.writeOptionalString(xContentType != null ? xContentType.mediaType(): null); + out.writeOptionalNamedWriteable(query); + } + + @Override + public String toString() { + return "ContextSetup{" + + ", index='" + index + '\'' + + ", document=" + document + + ", query=" + query + + ", xContentType=" + xContentType + + '}'; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (index != null) { + builder.field(INDEX_FIELD.getPreferredName(), index); + } + if (document != null) { + builder.field(DOCUMENT_FIELD.getPreferredName()); + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, document, xContentType)) { + builder.generator().copyCurrentStructure(parser); + } + } + if (query != null) { + builder.field(QUERY_FIELD.getPreferredName(), query); + } + } + builder.endObject(); + return builder; + } + } private Script script; - private SupportedContext context; + private ScriptContext context = PainlessTestScript.CONTEXT; + private ContextSetup contextSetup; static Request parse(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - Request(Script script, SupportedContext context) { + Request(Script script, String scriptContextName, ContextSetup setup) { this.script = Objects.requireNonNull(script); - this.context = context != null ? 
context : SupportedContext.PAINLESS_TEST; + if (scriptContextName != null) { + this.context = fromScriptContextName(scriptContextName); + } + if (setup != null) { + this.contextSetup = setup; + index(contextSetup.index); + } } Request() { @@ -118,16 +293,28 @@ public class PainlessExecuteAction extends Action getContext() { return context; } + public ContextSetup getContextSetup() { + return contextSetup; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (script.getType() != ScriptType.INLINE) { validationException = addValidationError("only inline scripts are supported", validationException); } + if (needDocumentAndIndex(context)) { + if (contextSetup.index == null) { + validationException = addValidationError("index is a required parameter for current context", validationException); + } + if (contextSetup.document == null) { + validationException = addValidationError("document is a required parameter for current context", validationException); + } + } return validationException; } @@ -135,26 +322,35 @@ public class PainlessExecuteAction extends Action scriptContext = request.context; + if (scriptContext == PainlessTestScript.CONTEXT) { + PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); + PainlessTestScript painlessTestScript = factory.newInstance(request.script.getParams()); + String result = Objects.toString(painlessTestScript.execute()); + return new Response(result); + } else if (scriptContext == FilterScript.CONTEXT) { + return prepareRamIndex(request, (context, leafReaderContext) -> { + FilterScript.Factory factory = scriptService.compile(request.script, FilterScript.CONTEXT); + FilterScript.LeafFactory leafFactory = + factory.newFactory(request.getScript().getParams(), context.lookup()); + FilterScript filterScript = leafFactory.newInstance(leafReaderContext); + filterScript.setDocument(0); + boolean result = 
filterScript.execute(); + return new Response(result); + }, indexService); + } else if (scriptContext == ScoreScript.CONTEXT) { + return prepareRamIndex(request, (context, leafReaderContext) -> { + ScoreScript.Factory factory = scriptService.compile(request.script, ScoreScript.CONTEXT); + ScoreScript.LeafFactory leafFactory = + factory.newFactory(request.getScript().getParams(), context.lookup()); + ScoreScript scoreScript = leafFactory.newInstance(leafReaderContext); + scoreScript.setDocument(0); + + if (request.contextSetup.query != null) { + Query luceneQuery = request.contextSetup.query.rewrite(context).toQuery(context); + IndexSearcher indexSearcher = new IndexSearcher(leafReaderContext.reader()); + luceneQuery = indexSearcher.rewrite(luceneQuery); + Weight weight = indexSearcher.createWeight(luceneQuery, true, 1f); + Scorer scorer = weight.scorer(indexSearcher.getIndexReader().leaves().get(0)); + // Consume the first (and only) match. + int docID = scorer.iterator().nextDoc(); + assert docID == scorer.docID(); + scoreScript.setScorer(scorer); + } + + double result = scoreScript.execute(); + return new Response(result); + }, indexService); + } else { + throw new UnsupportedOperationException("unsupported context [" + scriptContext.name + "]"); } } + private static Response prepareRamIndex(Request request, + CheckedBiFunction handler, + IndexService indexService) throws IOException { + + Analyzer defaultAnalyzer = indexService.getIndexAnalyzers().getDefaultIndexAnalyzer(); + + try (RAMDirectory ramDirectory = new RAMDirectory()) { + try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(defaultAnalyzer))) { + String index = indexService.index().getName(); + String type = indexService.mapperService().documentMapper().type(); + BytesReference document = request.contextSetup.document; + XContentType xContentType = request.contextSetup.xContentType; + SourceToParse sourceToParse = SourceToParse.source(index, type, "_id", document, 
xContentType); + ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse); + indexWriter.addDocuments(parsedDocument.docs()); + try (IndexReader indexReader = DirectoryReader.open(indexWriter)) { + final long absoluteStartMillis = System.currentTimeMillis(); + QueryShardContext context = + indexService.newQueryShardContext(0, indexReader, () -> absoluteStartMillis, null); + return handler.apply(context, indexReader.leaves().get(0)); + } + } + } + } } static class RestAction extends BaseRestHandler { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java new file mode 100644 index 00000000000..ce92a224f4e --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.painless; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request; +import org.elasticsearch.painless.PainlessExecuteAction.Response; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptException; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.elasticsearch.painless.PainlessExecuteAction.TransportAction.innerShardOperation; +import static org.hamcrest.Matchers.equalTo; + +public class PainlessExecuteApiTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Collections.singleton(PainlessPlugin.class); + } + + public void testDefaults() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + Request request = new Request(new Script("100.0 / 1000.0"), null, null); + Response response = innerShardOperation(request, scriptService, null); + assertThat(response.getResult(), equalTo("0.1")); + + Map params = new HashMap<>(); + params.put("count", 100.0D); + params.put("total", 1000.0D); + request = new Request(new Script(ScriptType.INLINE, "painless", "params.count / params.total", params), null, null); + response = innerShardOperation(request, scriptService, null); + assertThat(response.getResult(), equalTo("0.1")); + + Exception e = expectThrows(ScriptException.class, + () -> { + Request r = new Request(new 
Script(ScriptType.INLINE, + "painless", "params.count / params.total + doc['constant']", params), null, null); + innerShardOperation(r, scriptService, null); + }); + assertThat(e.getCause().getMessage(), equalTo("Variable [doc] is not defined.")); + } + + public void testFilterExecutionContext() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "field", "type=long"); + + Request.ContextSetup contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); + contextSetup.setXContentType(XContentType.JSON); + Request request = new Request(new Script("doc['field'].value >= 3"), "filter", contextSetup); + Response response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(true)); + + contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); + contextSetup.setXContentType(XContentType.JSON); + request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", + singletonMap("max", 3)), "filter", contextSetup); + response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(true)); + + contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 2}"), null); + contextSetup.setXContentType(XContentType.JSON); + request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", + singletonMap("max", 3)), "filter", contextSetup); + response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(false)); + } + + public void testScoreExecutionContext() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=text"); + + 
Request.ContextSetup contextSetup = new Request.ContextSetup("index", + new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"), new MatchQueryBuilder("text", "fox")); + contextSetup.setXContentType(XContentType.JSON); + Request request = new Request(new Script(ScriptType.INLINE, "painless", + "Math.round((_score + (doc['rank'].value / params.max_rank)) * 100.0) / 100.0", singletonMap("max_rank", 5.0)), "score", + contextSetup); + Response response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(1.09D)); + } + +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java index 488ae0e1643..44cd6b5304d 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -18,9 +18,18 @@ */ package org.elasticsearch.painless; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request.ContextSetup; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; @@ -28,12 +37,22 @@ import java.util.Collections; public class PainlessExecuteRequestTests extends AbstractStreamableXContentTestCase { + @Override 
+ protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedWriteables()); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents()); + } + @Override protected PainlessExecuteAction.Request createTestInstance() { Script script = new Script(randomAlphaOfLength(10)); - PainlessExecuteAction.Request.SupportedContext context = randomBoolean() ? - PainlessExecuteAction.Request.SupportedContext.PAINLESS_TEST : null; - return new PainlessExecuteAction.Request(script, context); + ScriptContext context = randomBoolean() ? randomFrom(PainlessExecuteAction.Request.SUPPORTED_CONTEXTS.values()) : null; + ContextSetup contextSetup = randomBoolean() ? randomContextSetup() : null; + return new PainlessExecuteAction.Request(script, context != null ? context.name : null, contextSetup); } @Override @@ -53,9 +72,26 @@ public class PainlessExecuteRequestTests extends AbstractStreamableXContentTestC public void testValidate() { Script script = new Script(ScriptType.STORED, null, randomAlphaOfLength(10), Collections.emptyMap()); - PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null); + PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null, null); Exception e = request.validate(); assertNotNull(e); assertEquals("Validation Failed: 1: only inline scripts are supported;", e.getMessage()); } + + private static ContextSetup randomContextSetup() { + String index = randomBoolean() ? randomAlphaOfLength(4) : null; + QueryBuilder query = randomBoolean() ? new MatchAllQueryBuilder() : null; + // TODO: pass down XContextType to createTestInstance() method. + // otherwise the document itself is different causing test failures. 
+ // This should be done in a seperate change as the test instance is created before xcontent type is randomly picked and + // all the createTestInstance() methods need to be changed, which will make this a big chnage +// BytesReference doc = randomBoolean() ? new BytesArray("{}") : null; + BytesReference doc = null; + + ContextSetup contextSetup = new ContextSetup(index, doc, query); +// if (doc != null) { +// contextSetup.setXContentType(XContentType.JSON); +// } + return contextSetup; + } } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml index 7b915cc38db..1e34a776189 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml @@ -1,3 +1,18 @@ +setup: + - do: + indices.create: + index: my-index + body: + mappings: + doc: + properties: + rank: + type: long + field: + type: keyword + text: + type: text + --- "Execute with defaults": - do: @@ -11,7 +26,7 @@ - match: { result: "0.1" } --- -"Execute with execute_api_script context": +"Execute with painless_test context": - do: scripts_painless_execute: body: @@ -20,6 +35,37 @@ params: var1: 10 var2: 100 - context: - painless_test: {} + context: "painless_test" - match: { result: "-90" } + +--- +"Execute with filter context": + - do: + scripts_painless_execute: + body: + script: + source: "doc['field'].value.length() <= params.max_length" + params: + max_length: 4 + context: "filter" + context_setup: + document: + field: "four" + index: "my-index" + - match: { result: true } + +--- +"Execute with score context": + - do: + scripts_painless_execute: + body: + script: + source: "doc['rank'].value / params.max_rank" + params: + max_rank: 5.0 + context: "score" + context_setup: + document: + rank: 4 
+ index: "my-index" + - match: { result: 0.8 } From 53ab470264261b25e799696085c134df09ddaf77 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 18 Jul 2018 13:33:57 +0200 Subject: [PATCH 083/260] use before instead of onOrBefore --- .../org/elasticsearch/painless/PainlessExecuteAction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 229c919a2e6..094a62d188b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -322,7 +322,7 @@ public class PainlessExecuteAction extends Action Date: Wed, 18 Jul 2018 12:58:17 +0100 Subject: [PATCH 084/260] Improve docs for search preferences (#32159) Today it is unclear what guarantees are offered by the search preference feature, and we claim a guarantee that is stronger than what we really offer: > A custom value will be used to guarantee that the same shards will be used > for the same custom value. This commit clarifies this documentation. Forward-port of #32098 to `master`. --- .../search/request/preference.asciidoc | 67 +++++++++++++------ 1 file changed, 45 insertions(+), 22 deletions(-) diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index 4fd801c5f76..5f3fcb2efa6 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -1,38 +1,55 @@ [[search-request-preference]] === Preference -Controls a `preference` of which shard copies on which to execute the -search. By default, the operation is randomized among the available shard -copies, unless allocation awareness is used. 
+Controls a `preference` of the shard copies on which to execute the search. By +default, Elasticsearch selects from the available shard copies in an +unspecified order, taking the <> and +<> configuration into +account. However, it may sometimes be desirable to try and route certain +searches to certain sets of shard copies, for instance to make better use of +per-copy caches. The `preference` is a query string parameter which can be set to: [horizontal] -`_local`:: - The operation will prefer to be executed on a local - allocated shard if possible. +`_only_local`:: + The operation will be executed only on shards allocated to the local + node. + +`_local`:: + The operation will be executed on shards allocated to the local node if + possible, and will fall back to other shards if not. `_prefer_nodes:abc,xyz`:: - Prefers execution on the nodes with the provided - node ids (`abc` or `xyz` in this case) if applicable. + The operation will be executed on nodes with one of the provided node + ids (`abc` or `xyz` in this case) if possible. If suitable shard copies + exist on more than one of the selected nodes then the order of + preference between these copies is unspecified. -`_shards:2,3`:: - Restricts the operation to the specified shards. (`2` - and `3` in this case). This preference can be combined with other - preferences but it has to appear first: `_shards:2,3|_local` +`_shards:2,3`:: + Restricts the operation to the specified shards. (`2` and `3` in this + case). This preference can be combined with other preferences but it + has to appear first: `_shards:2,3|_local` -`_only_nodes`:: - Restricts the operation to nodes specified in <> +`_only_nodes:abc*,x*yz,...`:: + Restricts the operation to nodes specified according to the + <>. If suitable shard copies exist on more + than one of the selected nodes then the order of preference between + these copies is unspecified. 
-Custom (string) value:: - A custom value will be used to guarantee that - the same shards will be used for the same custom value. This can help - with "jumping values" when hitting different shards in different refresh - states. A sample value can be something like the web session id, or the - user name. +Custom (string) value:: + Any value that does not start with `_`. If two searches both give the same + custom string value for their preference and the underlying cluster state + does not change then the same ordering of shards will be used for the + searches. This does not guarantee that the exact same shards will be used + each time: the cluster state, and therefore the selected shards, may change + for a number of reasons including shard relocations and shard failures, and + nodes may sometimes reject searches causing fallbacks to alternative nodes. + However, in practice the ordering of shards tends to remain stable for long + periods of time. A good candidate for a custom preference value is something + like the web session id or the user name. -For instance, use the user's session ID to ensure consistent ordering of results -for the user: +For instance, use the user's session ID `xyzabc123` as follows: [source,js] ------------------------------------------------ @@ -47,3 +64,9 @@ GET /_search?preference=xyzabc123 ------------------------------------------------ // CONSOLE +NOTE: The `_only_local` preference guarantees only to use shard copies on the +local node, which is sometimes useful for troubleshooting. All other options do +not _fully_ guarantee that any particular shard copies are used in a search, +and on a changing index this may mean that repeated searches may yield +different results if they are executed on different shard copies which are in +different refresh states. 
From 6de1f96cadd8d187e3732b710eb10264f733d422 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 18 Jul 2018 15:32:36 +0200 Subject: [PATCH 085/260] Fix BwC Tests looking for UUID Pre 6.4 (#32158) * UUID field was added for #31791 and only went into 6.4 and 7.0 * Fixes #32119 --- qa/mixed-cluster/build.gradle | 1 - .../resources/rest-api-spec/test/indices.stats/10_index.yml | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index db743cb12b1..ac57d51def7 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -57,7 +57,6 @@ for (Version version : bwcVersions.wireCompatible) { tasks.getByName("${baseName}#mixedClusterTestRunner").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") - systemProperty 'tests.rest.blacklist', ['indices.stats/10_index/Index - all'].join(',') } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml index 42847b05cd1..564a482727f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -41,6 +41,10 @@ setup: --- "Index - all": + - skip: + version: " - 6.3.99" + reason: "uuid is only available from 6.4.0 on" + - do: indices.stats: { index: _all } From cfb30144c9175d1f1538af0aa5c1841f72159128 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 18 Jul 2018 14:43:20 +0100 Subject: [PATCH 086/260] Call setReferences() on custom referring tokenfilters in _analyze (#32157) When building custom tokenfilters without an index in the _analyze endpoint, we need to ensure that referring filters are correctly built by calling their #setReferences() method Fixes #32154 --- 
.../test/analysis-common/40_token_filters.yml | 15 +++++++++++ .../analyze/TransportAnalyzeAction.java | 26 ++++++++++++++++++- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 3dca3bfd777..150fa39dcb9 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -1557,3 +1557,18 @@ filter: [my_bengali_stem] - length: { tokens: 1 } - match: { tokens.0.token: কর } + +--- +"multiplexer": + - do: + indices.analyze: + body: + text: "The quick fox" + tokenizer: "standard" + filter: + - type: multiplexer + filters: [ lowercase, uppercase ] + preserve_original: false + - length: { tokens: 6 } + - match: { tokens.0.token: the } + - match: { tokens.1.token: THE } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 35f1f725b65..5c5da62571f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.analysis.CustomAnalyzerProvider; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.MultiTermAwareComponent; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.ReferringFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -574,6 
+575,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenizerFactory, List charFilterFactoryList, boolean normalizer) throws IOException { List tokenFilterFactoryList = new ArrayList<>(); + List referringFilters = new ArrayList<>(); if (request.tokenFilters() != null && request.tokenFilters().size() > 0) { List tokenFilters = request.tokenFilters(); for (AnalyzeRequest.NameOrDefinition tokenFilter : tokenFilters) { @@ -594,7 +596,9 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenFilterFactoryFactory; @@ -629,6 +633,26 @@ public class TransportAnalyzeAction extends TransportSingleShardAction prebuiltFilters = analysisRegistry.buildTokenFilterFactories(indexSettings); + for (ReferringFilterFactory rff : referringFilters) { + rff.setReferences(prebuiltFilters); + } + + } return tokenFilterFactoryList; } From 605dc49c4828fe74d039e361cbe283bbf19a9a49 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 18 Jul 2018 09:29:52 -0700 Subject: [PATCH 087/260] Painless: Fix caching bug and clean up addPainlessClass. (#32142) This change cleans up the addPainlessClass methods by doing the following things: * Rename many variable names to match the new conventions described in the JavaDocs for PainlessLookup * Decouples Whitelist.Class from adding a PainlessClass directly * Adds a second version of addPainlessClass that is intended for use to add future defaults in a follow PR This change also fixes the method and field caches by storing Classes instead of Strings since it would technically be possible now that the whitelists are extendable to have different Classes with the same name. It was convenient to add this change together since some of the new constants are shared. Note the changes are largely mechanical again where all the code behavior should remain the same. 
--- .../lookup/PainlessLookupBuilder.java | 527 +++++++++++------- 1 file changed, 327 insertions(+), 200 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 2150c0b210a..ecf15c7ad2c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -30,236 +30,249 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.reflect.Modifier; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Stack; import java.util.regex.Pattern; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_PAINLESS_CLASS_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.anyTypeNameToPainlessTypeName; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; public class PainlessLookupBuilder { - private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); - private static final Map methodCache = new HashMap<>(); - private static final Map fieldCache = new HashMap<>(); + private static class PainlessMethodCacheKey { - private static String buildMethodCacheKey(String structName, String methodName, List> arguments) { - StringBuilder key = new StringBuilder(); - key.append(structName); - key.append(methodName); + private final Class javaClass; + private final String methodName; + private final List> painlessTypeParameters; - for (Class argument : arguments) { - key.append(argument.getName()); + private PainlessMethodCacheKey(Class javaClass, String methodName, List> 
painlessTypeParameters) { + this.javaClass = javaClass; + this.methodName = methodName; + this.painlessTypeParameters = Collections.unmodifiableList(painlessTypeParameters); } - return key.toString(); + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessMethodCacheKey that = (PainlessMethodCacheKey)object; + + return Objects.equals(javaClass, that.javaClass) && + Objects.equals(methodName, that.methodName) && + Objects.equals(painlessTypeParameters, that.painlessTypeParameters); + } + + @Override + public int hashCode() { + return Objects.hash(javaClass, methodName, painlessTypeParameters); + } } - private static String buildFieldCacheKey(String structName, String fieldName, String typeName) { - return structName + fieldName + typeName; + private static class PainlessFieldCacheKey { + + private final Class javaClass; + private final String fieldName; + private final Class painlessType; + + private PainlessFieldCacheKey(Class javaClass, String fieldName, Class painlessType) { + this.javaClass = javaClass; + this.fieldName = fieldName; + this.painlessType = painlessType; + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessFieldCacheKey that = (PainlessFieldCacheKey) object; + + return Objects.equals(javaClass, that.javaClass) && + Objects.equals(fieldName, that.fieldName) && + Objects.equals(painlessType, that.painlessType); + } + + @Override + public int hashCode() { + return Objects.hash(javaClass, fieldName, painlessType); + } } - private final Map> painlessTypesToJavaClasses; + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); + + private static final Pattern CLASS_NAME_PATTERN = 
Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); + private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); + private static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); + + private static String anyTypesArrayToCanonicalString(Class[] anyTypesArray, boolean toPainlessTypes) { + return anyTypesListToCanonicalString(Arrays.asList(anyTypesArray), toPainlessTypes); + } + + private static String anyTypesListToCanonicalString(List> anyTypesList, boolean toPainlessTypes) { + StringBuilder anyTypesCanonicalStringBuilder = new StringBuilder("["); + + int anyTypesSize = anyTypesList.size(); + int anyTypesIndex = 0; + + for (Class anyType : anyTypesList) { + String anyTypeCanonicalName = anyType.getCanonicalName(); + + if (toPainlessTypes) { + anyTypeCanonicalName = anyTypeNameToPainlessTypeName(anyTypeCanonicalName); + } + + anyTypesCanonicalStringBuilder.append(anyTypeCanonicalName); + + if (++anyTypesIndex < anyTypesSize) { + anyTypesCanonicalStringBuilder.append(","); + } + } + + anyTypesCanonicalStringBuilder.append("]"); + + return anyTypesCanonicalStringBuilder.toString(); + } + + private final List whitelists; + + private final Map> painlessClassNamesToJavaClasses; private final Map, PainlessClassBuilder> javaClassesToPainlessClassBuilders; public PainlessLookupBuilder(List whitelists) { - painlessTypesToJavaClasses = new HashMap<>(); + this.whitelists = whitelists; + + painlessClassNamesToJavaClasses = new HashMap<>(); javaClassesToPainlessClassBuilders = new HashMap<>(); - String origin = null; - - painlessTypesToJavaClasses.put("def", def.class); - javaClassesToPainlessClassBuilders.put(def.class, new PainlessClassBuilder("def", Object.class, Type.getType(Object.class))); - - try { - // first iteration collects all the Painless type names that - // are used for validation during the second iteration - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistStruct : 
whitelist.whitelistStructs) { - String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - PainlessClassBuilder painlessStruct = - javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(painlessTypeName)); - - if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { - throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + - "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]"); - } - - origin = whitelistStruct.origin; - addStruct(whitelist.javaClassLoader, whitelistStruct); - - painlessStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(painlessTypeName)); - javaClassesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); - } - } - - // second iteration adds all the constructors, methods, and fields that will - // be available in Painless along with validating they exist and all their types have - // been white-listed during the first iteration - for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { - String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - - for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { - origin = whitelistConstructor.origin; - addConstructor(painlessTypeName, whitelistConstructor); - } - - for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { - origin = whitelistMethod.origin; - addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); - } - - for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { - origin = whitelistField.origin; - addField(painlessTypeName, whitelistField); - } - } - } - } catch (Exception exception) { - throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); - } - - // goes through each Painless struct and 
determines the inheritance list, - // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : javaClassesToPainlessClassBuilders.keySet()) { - PainlessClassBuilder painlessStruct = javaClassesToPainlessClassBuilders.get(javaClass); - - List painlessSuperStructs = new ArrayList<>(); - Class javaSuperClass = painlessStruct.clazz.getSuperclass(); - - Stack> javaInteraceLookups = new Stack<>(); - javaInteraceLookups.push(painlessStruct.clazz); - - // adds super classes to the inheritance list - if (javaSuperClass != null && javaSuperClass.isInterface() == false) { - while (javaSuperClass != null) { - PainlessClassBuilder painlessSuperStruct = javaClassesToPainlessClassBuilders.get(javaSuperClass); - - if (painlessSuperStruct != null) { - painlessSuperStructs.add(painlessSuperStruct.name); - } - - javaInteraceLookups.push(javaSuperClass); - javaSuperClass = javaSuperClass.getSuperclass(); - } - } - - // adds all super interfaces to the inheritance list - while (javaInteraceLookups.isEmpty() == false) { - Class javaInterfaceLookup = javaInteraceLookups.pop(); - - for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClassBuilder painlessInterfaceStruct = javaClassesToPainlessClassBuilders.get(javaSuperInterface); - - if (painlessInterfaceStruct != null) { - String painlessInterfaceStructName = painlessInterfaceStruct.name; - - if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) { - painlessSuperStructs.add(painlessInterfaceStructName); - } - - for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) { - javaInteraceLookups.push(javaPushInterface); - } - } - } - } - - // copies methods and fields from super structs to the parent struct - copyStruct(painlessStruct.name, painlessSuperStructs); - - // copies methods and fields from Object into interface types - if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - 
PainlessClassBuilder painlessObjectStruct = javaClassesToPainlessClassBuilders.get(Object.class); - - if (painlessObjectStruct != null) { - copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); - } - } - } - - // precompute runtime classes - for (PainlessClassBuilder painlessStruct : javaClassesToPainlessClassBuilders.values()) { - addRuntimeClass(painlessStruct); - } + painlessClassNamesToJavaClasses.put(DEF_PAINLESS_CLASS_NAME, def.class); + javaClassesToPainlessClassBuilders.put(def.class, + new PainlessClassBuilder(DEF_PAINLESS_CLASS_NAME, Object.class, Type.getType(Object.class))); } - private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) { - String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - String importedPainlessTypeName = painlessTypeName; + private Class painlessTypeNameToPainlessType(String painlessTypeName) { + return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessTypeName, painlessClassNamesToJavaClasses); + } - if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) { - throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]"); + private void validatePainlessType(Class painlessType) { + PainlessLookupUtility.validatePainlessType(painlessType, javaClassesToPainlessClassBuilders.keySet()); + } + + public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importPainlessClassName) { + Objects.requireNonNull(classLoader); + Objects.requireNonNull(javaClassName); + + String painlessClassName = anyTypeNameToPainlessTypeName(javaClassName); + + if (CLASS_NAME_PATTERN.matcher(painlessClassName).matches() == false) { + throw new IllegalArgumentException("invalid painless class name [" + painlessClassName + "]"); } - int index = whitelistStruct.javaClassName.lastIndexOf('.'); - - if (index != -1) { - importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 
1).replace('$', '.'); - } + String importedPainlessClassName = anyTypeNameToPainlessTypeName(javaClassName.substring(javaClassName.lastIndexOf('.') + 1)); Class javaClass; - if ("void".equals(whitelistStruct.javaClassName)) javaClass = void.class; - else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class; - else if ("byte".equals(whitelistStruct.javaClassName)) javaClass = byte.class; - else if ("short".equals(whitelistStruct.javaClassName)) javaClass = short.class; - else if ("char".equals(whitelistStruct.javaClassName)) javaClass = char.class; - else if ("int".equals(whitelistStruct.javaClassName)) javaClass = int.class; - else if ("long".equals(whitelistStruct.javaClassName)) javaClass = long.class; - else if ("float".equals(whitelistStruct.javaClassName)) javaClass = float.class; - else if ("double".equals(whitelistStruct.javaClassName)) javaClass = double.class; + if ("void".equals(javaClassName)) javaClass = void.class; + else if ("boolean".equals(javaClassName)) javaClass = boolean.class; + else if ("byte".equals(javaClassName)) javaClass = byte.class; + else if ("short".equals(javaClassName)) javaClass = short.class; + else if ("char".equals(javaClassName)) javaClass = char.class; + else if ("int".equals(javaClassName)) javaClass = int.class; + else if ("long".equals(javaClassName)) javaClass = long.class; + else if ("float".equals(javaClassName)) javaClass = float.class; + else if ("double".equals(javaClassName)) javaClass = double.class; else { try { - javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader); + javaClass = Class.forName(javaClassName, true, classLoader); + + if (javaClass == def.class) { + throw new IllegalArgumentException("cannot add reserved painless class [" + DEF_PAINLESS_CLASS_NAME + "]"); + } + + if (javaClass.isArray()) { + throw new IllegalArgumentException("cannot add an array type java class [" + javaClassName + "] as a painless class"); + } } catch (ClassNotFoundException 
cnfe) { - throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" + - " for struct [" + painlessTypeName + "]"); + throw new IllegalArgumentException("java class [" + javaClassName + "] not found", cnfe); } } - PainlessClassBuilder existingStruct = javaClassesToPainlessClassBuilders.get(javaClass); + addPainlessClass(painlessClassName, importedPainlessClassName, javaClass, importPainlessClassName); + } - if (existingStruct == null) { - PainlessClassBuilder struct = new PainlessClassBuilder(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); - painlessTypesToJavaClasses.put(painlessTypeName, javaClass); - javaClassesToPainlessClassBuilders.put(javaClass, struct); - } else if (existingStruct.clazz.equals(javaClass) == false) { - throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " + - "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " + - "[" + existingStruct.clazz.getName() + "]"); + public void addPainlessClass(Class javaClass, boolean importPainlessClassName) { + Objects.requireNonNull(javaClass); + + if (javaClass == def.class) { + throw new IllegalArgumentException("cannot specify reserved painless class [" + DEF_PAINLESS_CLASS_NAME + "]"); } - if (painlessTypeName.equals(importedPainlessTypeName)) { - if (whitelistStruct.onlyFQNJavaClassName == false) { - throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package"); + String javaClassName = javaClass.getCanonicalName(); + String painlessClassName = anyTypeNameToPainlessTypeName(javaClassName); + String importedPainlessClassName = anyTypeNameToPainlessTypeName(javaClassName.substring(javaClassName.lastIndexOf('.') + 1)); + + addPainlessClass(painlessClassName, importedPainlessClassName, javaClass, importPainlessClassName); + } + + private void addPainlessClass( + String painlessClassName, String importedPainlessClassName, 
Class javaClass, boolean importPainlessClassName) { + PainlessClassBuilder existingPainlessClassBuilder = javaClassesToPainlessClassBuilders.get(javaClass); + + if (existingPainlessClassBuilder == null) { + PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(painlessClassName, javaClass, Type.getType(javaClass)); + painlessClassNamesToJavaClasses.put(painlessClassName, javaClass); + javaClassesToPainlessClassBuilders.put(javaClass, painlessClassBuilder); + } else if (existingPainlessClassBuilder.clazz.equals(javaClass) == false) { + throw new IllegalArgumentException("painless class [" + painlessClassName + "] illegally represents multiple java classes " + + "[" + javaClass.getCanonicalName() + "] and [" + existingPainlessClassBuilder.clazz.getCanonicalName() + "]"); + } + + if (painlessClassName.equals(importedPainlessClassName)) { + if (importPainlessClassName == true) { + throw new IllegalArgumentException( + "must use only_fqn parameter on painless class [" + painlessClassName + "] with no package"); } } else { - Class importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName); + Class importedJavaClass = painlessClassNamesToJavaClasses.get(importedPainlessClassName); if (importedJavaClass == null) { - if (whitelistStruct.onlyFQNJavaClassName == false) { - if (existingStruct != null) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + if (importPainlessClassName) { + if (existingPainlessClassBuilder != null) { + throw new IllegalArgumentException( + "inconsistent only_fqn parameters found for painless class [" + painlessClassName + "]"); } - painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass); + painlessClassNamesToJavaClasses.put(importedPainlessClassName, javaClass); } } else if (importedJavaClass.equals(javaClass) == false) { - throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " + - "illegally represent 
multiple java classes [" + whitelistStruct.javaClassName + "] " + - "and [" + importedJavaClass.getName() + "]"); - } else if (whitelistStruct.onlyFQNJavaClassName) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + throw new IllegalArgumentException("painless class [" + importedPainlessClassName + "] illegally represents multiple " + + "java classes [" + javaClass.getCanonicalName() + "] and [" + importedJavaClass.getCanonicalName() + "]"); + } else if (importPainlessClassName == false) { + throw new IllegalArgumentException( + "inconsistent only_fqn parameters found for painless class [" + painlessClassName + "]"); } } } private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -273,7 +286,7 @@ public class PainlessLookupBuilder { String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); + Class painlessParameterClass = painlessTypeNameToPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); javaClassParameters[parameterCount] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); @@ -307,7 +320,8 @@ public class PainlessLookupBuilder { " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames); } - painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes), + painlessConstructor = 
painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(ownerStruct.clazz, "", painlessParametersTypes), key -> new PainlessMethod("", ownerStruct.clazz, null, void.class, painlessParametersTypes, asmConstructor, javaConstructor.getModifiers(), javaHandle)); ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); @@ -319,14 +333,14 @@ public class PainlessLookupBuilder { } private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); } - if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) { + if (METHOD_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) { throw new IllegalArgumentException("invalid method name" + " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "]."); } @@ -358,7 +372,7 @@ public class PainlessLookupBuilder { String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); + Class painlessParameterClass = painlessTypeNameToPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); javaClassParameters[parameterCount + augmentedOffset] = @@ -384,7 +398,7 @@ public class PainlessLookupBuilder { Class painlessReturnClass; try { - painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName); + 
painlessReturnClass = painlessTypeNameToPainlessType(whitelistMethod.painlessReturnTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + @@ -415,8 +429,8 @@ public class PainlessLookupBuilder { "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); } - painlessMethod = methodCache.computeIfAbsent( - buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), + painlessMethod = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, null, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); @@ -441,8 +455,8 @@ public class PainlessLookupBuilder { "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); } - painlessMethod = methodCache.computeIfAbsent( - buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), + painlessMethod = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.methods.put(painlessMethodKey, painlessMethod); @@ -457,14 +471,14 @@ public class PainlessLookupBuilder { } private void addField(String ownerStructName, WhitelistField whitelistField) { - PainlessClassBuilder 
ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName); } - if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) { + if (FIELD_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) { throw new IllegalArgumentException("invalid field name " + "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "]."); } @@ -481,7 +495,7 @@ public class PainlessLookupBuilder { Class painlessFieldClass; try { - painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName); + painlessFieldClass = painlessTypeNameToPainlessType(whitelistField.painlessFieldTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " + "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae); @@ -496,8 +510,8 @@ public class PainlessLookupBuilder { PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName); if (painlessField == null) { - painlessField = fieldCache.computeIfAbsent( - buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), + painlessField = painlessFieldCache.computeIfAbsent( + new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null)); 
ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); @@ -525,8 +539,8 @@ public class PainlessLookupBuilder { PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName); if (painlessField == null) { - painlessField = fieldCache.computeIfAbsent( - buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), + painlessField = painlessFieldCache.computeIfAbsent( + new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); ownerStruct.members.put(whitelistField.javaFieldName, painlessField); @@ -538,14 +552,15 @@ public class PainlessLookupBuilder { } private void copyStruct(String struct, List children) { - final PainlessClassBuilder owner = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(struct)); + final PainlessClassBuilder owner = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } for (int count = 0; count < children.size(); ++count) { - final PainlessClassBuilder child = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(children.get(count))); + final PainlessClassBuilder child = + javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -710,6 +725,122 @@ public class PainlessLookupBuilder { } public PainlessLookup build() { + String origin = "internal error"; + + try { + // first iteration collects all the Painless type names that + // are used for validation during the second iteration + for (Whitelist whitelist : 
whitelists) { + for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { + String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); + PainlessClassBuilder painlessStruct = + javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(painlessTypeName)); + + if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { + throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + + "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]"); + } + + origin = whitelistStruct.origin; + addPainlessClass( + whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false); + + painlessStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(painlessTypeName)); + javaClassesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); + } + } + + // second iteration adds all the constructors, methods, and fields that will + // be available in Painless along with validating they exist and all their types have + // been white-listed during the first iteration + for (Whitelist whitelist : whitelists) { + for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { + String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); + + for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { + origin = whitelistConstructor.origin; + addConstructor(painlessTypeName, whitelistConstructor); + } + + for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { + origin = whitelistMethod.origin; + addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); + } + + for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { + origin = whitelistField.origin; + addField(painlessTypeName, whitelistField); + } + } + } + } catch (Exception exception) { + throw 
new IllegalArgumentException("error loading whitelist(s) " + origin, exception); + } + + // goes through each Painless struct and determines the inheritance list, + // and then adds all inherited types to the Painless struct's whitelist + for (Class javaClass : javaClassesToPainlessClassBuilders.keySet()) { + PainlessClassBuilder painlessStruct = javaClassesToPainlessClassBuilders.get(javaClass); + + List painlessSuperStructs = new ArrayList<>(); + Class javaSuperClass = painlessStruct.clazz.getSuperclass(); + + Stack> javaInteraceLookups = new Stack<>(); + javaInteraceLookups.push(painlessStruct.clazz); + + // adds super classes to the inheritance list + if (javaSuperClass != null && javaSuperClass.isInterface() == false) { + while (javaSuperClass != null) { + PainlessClassBuilder painlessSuperStruct = javaClassesToPainlessClassBuilders.get(javaSuperClass); + + if (painlessSuperStruct != null) { + painlessSuperStructs.add(painlessSuperStruct.name); + } + + javaInteraceLookups.push(javaSuperClass); + javaSuperClass = javaSuperClass.getSuperclass(); + } + } + + // adds all super interfaces to the inheritance list + while (javaInteraceLookups.isEmpty() == false) { + Class javaInterfaceLookup = javaInteraceLookups.pop(); + + for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { + PainlessClassBuilder painlessInterfaceStruct = javaClassesToPainlessClassBuilders.get(javaSuperInterface); + + if (painlessInterfaceStruct != null) { + String painlessInterfaceStructName = painlessInterfaceStruct.name; + + if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) { + painlessSuperStructs.add(painlessInterfaceStructName); + } + + for (Class javaPushInterface : javaInterfaceLookup.getInterfaces()) { + javaInteraceLookups.push(javaPushInterface); + } + } + } + } + + // copies methods and fields from super structs to the parent struct + copyStruct(painlessStruct.name, painlessSuperStructs); + + // copies methods and fields from Object into 
interface types + if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { + PainlessClassBuilder painlessObjectStruct = javaClassesToPainlessClassBuilders.get(Object.class); + + if (painlessObjectStruct != null) { + copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); + } + } + } + + // precompute runtime classes + for (PainlessClassBuilder painlessStruct : javaClassesToPainlessClassBuilders.values()) { + addRuntimeClass(painlessStruct); + } + Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); // copy all structs to make them unmodifiable for outside users: @@ -718,10 +849,6 @@ public class PainlessLookupBuilder { javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); } - return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessClasses); - } - - public Class getJavaClassFromPainlessType(String painlessType) { - return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); + return new PainlessLookup(painlessClassNamesToJavaClasses, javaClassesToPainlessClasses); } } From 15ff3da653c5dd9eae7f7a654a9ee6a931d478b7 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Wed, 18 Jul 2018 09:33:09 -0700 Subject: [PATCH 088/260] Add support for field aliases. (#32172) * Add basic support for field aliases in index mappings. (#31287) * Allow for aliases when fetching stored fields. (#31411) * Add tests around accessing field aliases in scripts. (#31417) * Add documentation around field aliases. (#31538) * Add validation for field alias mappings. (#31518) * Return both concrete fields and aliases in DocumentFieldMappers#getMapper. (#31671) * Make sure that field-level security is enforced when using field aliases. (#31807) * Add more comprehensive tests for field aliases in queries + aggregations. (#31565) * Remove the deprecated method DocumentFieldMappers#getFieldMapper. 
(#32148) --- docs/reference/indices/clearcache.asciidoc | 3 +- docs/reference/mapping.asciidoc | 6 +- docs/reference/mapping/types.asciidoc | 4 + docs/reference/mapping/types/alias.asciidoc | 101 ++++++++ .../script/expression/ExpressionTests.java | 67 +++++- .../percolator/PercolateQuery.java | 4 + .../percolator/PercolateQueryBuilder.java | 12 +- .../percolator/PercolatorFieldMapper.java | 18 +- .../percolator/CandidateQueryTests.java | 6 +- .../PercolateQueryBuilderTests.java | 24 +- .../PercolatorFieldMapperTests.java | 6 +- .../TransportGetFieldMappingsIndexAction.java | 18 +- ...TransportFieldCapabilitiesIndexAction.java | 4 +- .../org/elasticsearch/index/IndexWarmer.java | 16 +- .../fieldvisitor/CustomFieldsVisitor.java | 18 +- .../index/get/ShardGetService.java | 4 +- .../index/mapper/DocumentFieldMappers.java | 28 ++- .../index/mapper/DocumentMapper.java | 5 +- .../index/mapper/DocumentParser.java | 98 ++++---- .../index/mapper/FieldAliasMapper.java | 132 +++++++++++ .../index/mapper/FieldMapper.java | 5 + .../index/mapper/FieldTypeLookup.java | 106 +++++++-- .../elasticsearch/index/mapper/Mapper.java | 5 + .../index/mapper/MapperMergeValidator.java | 214 +++++++++++++++++ .../index/mapper/MapperService.java | 133 ++--------- .../index/mapper/MapperUtils.java | 17 +- .../index/mapper/ObjectMapper.java | 5 + .../index/query/ExistsQueryBuilder.java | 14 +- .../query/SpanMultiTermQueryBuilder.java | 14 +- .../index/query/SpanNearQueryBuilder.java | 29 ++- .../index/query/TermsSetQueryBuilder.java | 21 +- .../index/search/QueryParserHelper.java | 28 ++- .../elasticsearch/indices/IndicesModule.java | 3 + .../SignificantTermsAggregatorFactory.java | 15 +- .../SignificantTextAggregationBuilder.java | 7 +- .../SignificantTextAggregatorFactory.java | 30 ++- .../search/fetch/FetchPhase.java | 124 +++++----- .../subphase/highlight/HighlightPhase.java | 8 +- .../search/lookup/LeafFieldsLookup.java | 2 +- .../search/suggest/SuggestionBuilder.java | 3 +- 
.../completion/context/GeoContextMapping.java | 6 +- .../index/analysis/PreBuiltAnalyzerTests.java | 19 +- .../index/mapper/BinaryFieldMapperTests.java | 27 ++- .../index/mapper/BooleanFieldMapperTests.java | 3 +- .../mapper/CompletionFieldMapperTests.java | 98 +++++--- .../index/mapper/CopyToMapperTests.java | 12 +- .../index/mapper/DateFieldMapperTests.java | 6 +- .../mapper/DocumentFieldMapperTests.java | 8 +- .../mapper/DocumentMapperMergeTests.java | 65 +++-- .../index/mapper/DocumentParserTests.java | 108 ++++++++- .../index/mapper/DoubleIndexingDocTests.java | 19 +- .../index/mapper/DynamicMappingTests.java | 8 +- .../index/mapper/DynamicTemplatesTests.java | 21 +- .../index/mapper/FieldAliasMapperTests.java | 167 +++++++++++++ .../index/mapper/FieldTypeLookupTests.java | 178 ++++++++++++-- .../GenericStoreDynamicTemplateTests.java | 20 +- .../mapper/GeoPointFieldMapperTests.java | 6 +- .../mapper/GeoShapeFieldMapperTests.java | 34 +-- .../mapper/JavaMultiFieldMergeTests.java | 98 ++++---- .../mapper/MapperMergeValidatorTests.java | 118 +++++++++ .../index/mapper/MapperServiceTests.java | 35 +++ .../index/mapper/MultiFieldTests.java | 65 ++--- .../index/mapper/PathMapperTests.java | 1 - .../mapper/PathMatchDynamicTemplateTests.java | 28 +-- .../mapper/StoredNumericValuesTests.java | 9 +- .../index/mapper/TextFieldMapperTests.java | 53 +++-- .../query/CommonTermsQueryBuilderTests.java | 23 +- .../index/query/ExistsQueryBuilderTests.java | 4 +- .../FieldMaskingSpanQueryBuilderTests.java | 7 +- .../index/query/FuzzyQueryBuilderTests.java | 8 +- .../GeoBoundingBoxQueryBuilderTests.java | 11 +- .../query/GeoDistanceQueryBuilderTests.java | 9 +- .../query/GeoPolygonQueryBuilderTests.java | 3 +- .../MatchPhrasePrefixQueryBuilderTests.java | 8 +- .../query/MatchPhraseQueryBuilderTests.java | 8 +- .../index/query/MatchQueryBuilderTests.java | 21 +- .../query/MoreLikeThisQueryBuilderTests.java | 2 +- .../query/MultiMatchQueryBuilderTests.java | 7 +- 
.../index/query/PrefixQueryBuilderTests.java | 8 +- .../query/QueryStringQueryBuilderTests.java | 16 +- .../index/query/RandomQueryBuilder.java | 11 +- .../index/query/RangeQueryBuilderTests.java | 31 +-- .../index/query/RegexpQueryBuilderTests.java | 6 +- .../query/SimpleQueryStringBuilderTests.java | 3 +- .../query/SpanMultiTermQueryBuilderTests.java | 16 +- .../query/SpanTermQueryBuilderTests.java | 18 +- .../index/query/TermQueryBuilderTests.java | 7 +- .../index/query/TermsQueryBuilderTests.java | 13 +- .../query/TermsSetQueryBuilderTests.java | 25 +- .../query/WildcardQueryBuilderTests.java | 25 +- .../index/similarity/SimilarityTests.java | 92 ++++---- .../mapping/SimpleGetFieldMappingsIT.java | 40 +++- .../search/aggregations/bucket/RangeIT.java | 83 +++++++ .../aggregations/bucket/ReverseNestedIT.java | 28 +++ .../bucket/nested/NestedAggregatorTests.java | 58 +++++ .../nested/ReverseNestedAggregatorTests.java | 76 ++++++ .../SignificantTermsAggregatorTests.java | 82 ++++++- .../SignificantTextAggregatorTests.java | 133 ++++++++--- .../bucket/terms/TermsAggregatorTests.java | 8 +- .../search/aggregations/metrics/SumIT.java | 92 +++++++- .../ScriptedMetricAggregatorTests.java | 5 +- .../support/ValuesSourceConfigTests.java | 23 ++ .../highlight/HighlighterSearchIT.java | 100 ++++++++ .../search/fieldcaps/FieldCapabilitiesIT.java | 151 ++++++++++++ .../search/fields/SearchFieldsIT.java | 163 +++++++++++++ .../search/geo/GeoPolygonIT.java | 23 +- .../search/geo/GeoShapeQueryTests.java | 42 +++- .../search/lookup/LeafDocLookupTests.java | 75 ++++++ .../search/lookup/LeafFieldsLookupTests.java | 92 ++++++++ .../search/morelikethis/MoreLikeThisIT.java | 31 +++ .../elasticsearch/search/query/ExistsIT.java | 86 +++++++ .../search/query/QueryStringIT.java | 65 +++++ .../search/query/SearchQueryIT.java | 78 ++++++ .../search/query/SimpleQueryStringIT.java | 62 +++++ .../search/sort/FieldSortIT.java | 57 +++++ .../AbstractSuggestionBuilderTestCase.java | 8 +- 
.../suggest/CompletionSuggestSearchIT.java | 28 ++- .../search/suggest/SuggestSearchIT.java | 29 +++ .../CategoryContextMappingTests.java | 49 ++-- .../CompletionSuggesterBuilderTests.java | 3 +- .../completion/GeoContextMappingTests.java | 58 ++--- .../search/query/all-query-index.json | 8 + .../aggregations/AggregatorTestCase.java | 58 ++++- .../test/AbstractBuilderTestCase.java | 60 +++-- .../test/AbstractQueryTestCase.java | 13 +- .../integration/FieldLevelSecurityTests.java | 223 +++++++++++++++--- 126 files changed, 4090 insertions(+), 1051 deletions(-) create mode 100644 docs/reference/mapping/types/alias.asciidoc create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java create mode 100644 server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java diff --git a/docs/reference/indices/clearcache.asciidoc b/docs/reference/indices/clearcache.asciidoc index 6a7240dc958..6b90f9d49cb 100644 --- a/docs/reference/indices/clearcache.asciidoc +++ b/docs/reference/indices/clearcache.asciidoc @@ -16,7 +16,8 @@ explicitly by setting `query`, `fielddata` or `request`. All caches relating to a specific field(s) can also be cleared by specifying `fields` parameter with a comma delimited list of the -relevant fields. +relevant fields. Note that the provided names must refer to concrete +fields -- objects and field aliases are not supported. 
[float] === Multi Index diff --git a/docs/reference/mapping.asciidoc b/docs/reference/mapping.asciidoc index 6f8f1b38d6f..31957344baf 100644 --- a/docs/reference/mapping.asciidoc +++ b/docs/reference/mapping.asciidoc @@ -124,8 +124,10 @@ fields to an existing index with the <>. Other than where documented, *existing field mappings cannot be updated*. Changing the mapping would mean invalidating already indexed -documents. Instead, you should create a new index with the correct mappings -and <> your data into that index. +documents. Instead, you should create a new index with the correct mappings +and <> your data into that index. If you only wish +to rename a field and not change its mappings, it may make sense to introduce +an <> field. [float] == Example mapping diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index 9b71d7e7404..fbd8181d095 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -40,6 +40,8 @@ string:: <> and <> <>:: Defines parent/child relation for documents within the same index +<>:: Defines an alias to an existing field. + <>:: Record numeric features to boost hits at query time. <>:: Record numeric feature vectors to boost hits at query time. @@ -58,6 +60,8 @@ the <>, the This is the purpose of _multi-fields_. Most datatypes support multi-fields via the <> parameter. +include::types/alias.asciidoc[] + include::types/array.asciidoc[] include::types/binary.asciidoc[] diff --git a/docs/reference/mapping/types/alias.asciidoc b/docs/reference/mapping/types/alias.asciidoc new file mode 100644 index 00000000000..d2b5ccdce8a --- /dev/null +++ b/docs/reference/mapping/types/alias.asciidoc @@ -0,0 +1,101 @@ +[[alias]] +=== Alias datatype + +An `alias` mapping defines an alternate name for a field in the index. +The alias can be used in place of the target field in <> requests, +and selected other APIs like <>. 
+ +[source,js] +-------------------------------- +PUT trips +{ + "mappings": { + "_doc": { + "properties": { + "distance": { + "type": "long" + }, + "route_length_miles": { + "type": "alias", + "path": "distance" // <1> + }, + "transit_mode": { + "type": "keyword" + } + } + } + } +} + +GET _search +{ + "query": { + "range" : { + "route_length_miles" : { + "gte" : 39 + } + } + } +} +-------------------------------- +// CONSOLE + +<1> The path to the target field. Note that this must be the full path, including any parent +objects (e.g. `object1.object2.field`). + +Almost all components of the search request accept field aliases. In particular, aliases can be +used in queries, aggregations, and sort fields, as well as when requesting `docvalue_fields`, +`stored_fields`, suggestions, and highlights. Scripts also support aliases when accessing +field values. Please see the section on <> for exceptions. + +In some parts of the search request and when requesting field capabilities, field wildcard patterns can be +provided. In these cases, the wildcard pattern will match field aliases in addition to concrete fields: + +[source,js] +-------------------------------- +GET trips/_field_caps?fields=route_*,transit_mode +-------------------------------- +// CONSOLE +// TEST[continued] + +[[alias-targets]] +==== Alias targets + +There are a few restrictions on the target of an alias: + + * The target must be a concrete field, and not an object or another field alias. + * The target field must exist at the time the alias is created. + * If nested objects are defined, a field alias must have the same nested scope as its target. + +Additionally, a field alias can only have one target. This means that it is not possible to use a +field alias to query over multiple target fields in a single clause. + +[[unsupported-apis]] +==== Unsupported APIs + +Writes to field aliases are not supported: attempting to use an alias in an index or update request +will result in a failure. 
Likewise, aliases cannot be used as the target of `copy_to`. + +Because alias names are not present in the document source, aliases cannot be used when performing +source filtering. For example, the following request will return an empty result for `_source`: + +[source,js] +-------------------------------- +GET /_search +{ + "query" : { + "match_all": {} + }, + "_source": "route_length_miles" +} +-------------------------------- +// CONSOLE +// TEST[continued] + +Currently only the search and field capabilities APIs will accept and resolve field aliases. +Other APIs that accept field names, such as <>, cannot be used +with field aliases. + +Finally, some queries, such as `terms`, `geo_shape`, and `more_like_this`, allow for fetching query +information from an indexed document. Because field aliases aren't supported when fetching documents, +the part of the query that specifies the lookup path cannot refer to a field by its alias. \ No newline at end of file diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java index 591cc9ce477..33e6239002e 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTests.java @@ -20,27 +20,52 @@ package org.elasticsearch.script.expression; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; +import 
org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.text.ParseException; import java.util.Collections; -public class ExpressionTests extends ESSingleNodeTestCase { - ExpressionScriptEngine service; - SearchLookup lookup; +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExpressionTests extends ESTestCase { + private ExpressionScriptEngine service; + private SearchLookup lookup; @Override public void setUp() throws Exception { super.setUp(); - IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); + + NumberFieldType fieldType = new NumberFieldType(NumberType.DOUBLE); + MapperService mapperService = mock(MapperService.class); + when(mapperService.fullName("field")).thenReturn(fieldType); + when(mapperService.fullName("alias")).thenReturn(fieldType); + + SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); + when(doubleValues.advanceExact(anyInt())).thenReturn(true); + when(doubleValues.nextValue()).thenReturn(2.718); + + AtomicNumericFieldData atomicFieldData = mock(AtomicNumericFieldData.class); + when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues); + + IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class); + when(fieldData.getFieldName()).thenReturn("field"); + when(fieldData.load(anyObject())).thenReturn(atomicFieldData); + service = new ExpressionScriptEngine(Settings.EMPTY); - QueryShardContext shardContext = index.newQueryShardContext(0, null, () -> 0, null); - lookup = new SearchLookup(index.mapperService(), shardContext::getForField, null); + lookup 
= new SearchLookup(mapperService, ignored -> fieldData, null); } private SearchScript.LeafFactory compile(String expression) { @@ -50,22 +75,38 @@ public class ExpressionTests extends ESSingleNodeTestCase { public void testNeedsScores() { assertFalse(compile("1.2").needs_score()); - assertFalse(compile("doc['d'].value").needs_score()); + assertFalse(compile("doc['field'].value").needs_score()); assertTrue(compile("1/_score").needs_score()); - assertTrue(compile("doc['d'].value * _score").needs_score()); + assertTrue(compile("doc['field'].value * _score").needs_score()); } public void testCompileError() { ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['d'].value * *@#)(@$*@#$ + 4"); + compile("doc['field'].value * *@#)(@$*@#$ + 4"); }); assertTrue(e.getCause() instanceof ParseException); } public void testLinkError() { ScriptException e = expectThrows(ScriptException.class, () -> { - compile("doc['e'].value * 5"); + compile("doc['nonexistent'].value * 5"); }); assertTrue(e.getCause() instanceof ParseException); } + + public void testFieldAccess() throws IOException { + SearchScript script = compile("doc['field'].value").newInstance(null); + script.setDocument(1); + + double result = script.runAsDouble(); + assertEquals(2.718, result, 0.0); + } + + public void testFieldAccessWithFieldAlias() throws IOException { + SearchScript script = compile("doc['alias'].value").newInstance(null); + script.setDocument(1); + + double result = script.runAsDouble(); + assertEquals(2.718, result, 0.0); + } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index f24a9710d29..5bbf998883e 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -194,6 +194,10 @@ final class PercolateQuery extends 
Query implements Accountable { return candidateMatchesQuery; } + Query getVerifiedMatchesQuery() { + return verifiedMatchesQuery; + } + // Comparing identity here to avoid being cached // Note that in theory if the same instance gets used multiple times it could still get cached, // however since we create a new query instance each time we this query this shouldn't happen and thus diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index c1063795193..e28fbead29a 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -618,13 +618,13 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder queryShardContext; private KeywordFieldMapper queryTermsField; private KeywordFieldMapper extractionResultField; @@ -348,14 +355,9 @@ public class PercolatorFieldMapper extends FieldMapper { this.extractionResultField = extractionResultField; this.queryBuilderField = queryBuilderField; this.minimumShouldMatchFieldMapper = minimumShouldMatchFieldMapper; - this.mapUnmappedFieldAsText = getMapUnmappedFieldAsText(indexSettings); this.rangeFieldMapper = rangeFieldMapper; } - private static boolean getMapUnmappedFieldAsText(Settings indexSettings) { - return INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING.get(indexSettings); - } - @Override public FieldMapper updateFieldType(Map fullNameToFieldType) { PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType); @@ -402,7 +404,7 @@ public class PercolatorFieldMapper extends FieldMapper { Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated(); createQueryBuilderField(indexVersion, queryBuilderField, queryBuilder, context); - Query query = toQuery(queryShardContext, 
mapUnmappedFieldAsText, queryBuilder); + Query query = toQuery(queryShardContext, isMapUnmappedFieldAsText(), queryBuilder); processQuery(query, context); return null; } @@ -522,7 +524,7 @@ public class PercolatorFieldMapper extends FieldMapper { } boolean isMapUnmappedFieldAsText() { - return mapUnmappedFieldAsText; + return ((FieldType) fieldType).mapUnmappedFieldsAsText; } /** diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 106358b6cf0..e6d637aabb1 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -194,8 +194,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { } Collections.sort(intValues); - MappedFieldType intFieldType = mapperService.documentMapper("type").mappers() - .getMapper("int_field").fieldType(); + MappedFieldType intFieldType = mapperService.fullName("int_field"); List> queryFunctions = new ArrayList<>(); queryFunctions.add(MatchNoDocsQuery::new); @@ -327,8 +326,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { stringValues.add("value2"); stringValues.add("value3"); - MappedFieldType intFieldType = mapperService.documentMapper("type").mappers() - .getMapper("int_field").fieldType(); + MappedFieldType intFieldType = mapperService.fullName("int_field"); List ranges = new ArrayList<>(); ranges.add(new int[]{-5, 5}); ranges.add(new int[]{0, 10}); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 3b3ff4ed15c..e7163edef94 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ 
b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -75,7 +75,8 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase fieldPredicate, - String field, FieldMapper fieldMapper, Map fieldMappings, + String field, Mapper fieldMapper, Map fieldMappings, boolean includeDefaults) { if (fieldMappings.containsKey(field)) { return; @@ -207,7 +207,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc try { BytesReference bytes = XContentHelper.toXContent(fieldMapper, XContentType.JSON, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS, false); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), bytes)); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.name(), bytes)); } catch (IOException e) { throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index f1a1dc45140..18f33ab397f 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -83,8 +83,8 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA for (String field : fieldNames) { MappedFieldType ft = mapperService.fullName(field); if (ft != null) { - FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable()); - if (indicesService.isMetaDataField(field) || fieldPredicate.test(field)) { + if (indicesService.isMetaDataField(field) || fieldPredicate.test(ft.name())) { + FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), 
ft.isAggregatable()); responseMap.put(field, fieldCap); } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java index e06dc5d2e81..98716e9545d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -27,8 +27,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexShard; @@ -121,16 +119,12 @@ public final class IndexWarmer extends AbstractComponent { public TerminationHandle warmReader(final IndexShard indexShard, final Engine.Searcher searcher) { final MapperService mapperService = indexShard.mapperService(); final Map warmUpGlobalOrdinals = new HashMap<>(); - DocumentMapper docMapper = mapperService.documentMapper(); - if (docMapper != null) { - for (FieldMapper fieldMapper : docMapper.mappers()) { - final MappedFieldType fieldType = fieldMapper.fieldType(); - final String indexName = fieldType.name(); - if (fieldType.eagerGlobalOrdinals() == false) { - continue; - } - warmUpGlobalOrdinals.put(indexName, fieldType); + for (MappedFieldType fieldType : mapperService.fieldTypes()) { + final String indexName = fieldType.name(); + if (fieldType.eagerGlobalOrdinals() == false) { + continue; } + warmUpGlobalOrdinals.put(indexName, fieldType); } final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size()); for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) { diff --git 
a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java index bd1fd69eb74..c5d5a688ed9 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/CustomFieldsVisitor.java @@ -19,11 +19,8 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; -import org.elasticsearch.common.regex.Regex; import java.io.IOException; -import java.util.Collections; -import java.util.List; import java.util.Set; /** @@ -35,16 +32,10 @@ import java.util.Set; public class CustomFieldsVisitor extends FieldsVisitor { private final Set fields; - private final List patterns; - - public CustomFieldsVisitor(Set fields, List patterns, boolean loadSource) { - super(loadSource); - this.fields = fields; - this.patterns = patterns; - } public CustomFieldsVisitor(Set fields, boolean loadSource) { - this(fields, Collections.emptyList(), loadSource); + super(loadSource); + this.fields = fields; } @Override @@ -55,11 +46,6 @@ public class CustomFieldsVisitor extends FieldsVisitor { if (fields.contains(fieldInfo.name)) { return Status.YES; } - for (String pattern : patterns) { - if (Regex.simpleMatch(pattern, fieldInfo.name)) { - return Status.YES; - } - } return Status.NO; } } diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index a759f6a6767..5c5554cddcf 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -39,7 +39,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.DocumentMapper; 
-import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; @@ -202,7 +202,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { if (gFields != null && gFields.length > 0) { for (String field : gFields) { - FieldMapper fieldMapper = docMapper.mappers().getMapper(field); + Mapper fieldMapper = docMapper.mappers().getMapper(field); if (fieldMapper == null) { if (docMapper.objectMappers().get(field) != null) { // Only fail if we know it is a object field, missing paths / fields shouldn't fail. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index 9193ca209ba..f70c0038464 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -28,10 +28,10 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; -public final class DocumentFieldMappers implements Iterable { +public final class DocumentFieldMappers implements Iterable { /** Full field name to mapper */ - private final Map fieldMappers; + private final Map fieldMappers; private final FieldNameAnalyzer indexAnalyzer; private final FieldNameAnalyzer searchAnalyzer; @@ -44,8 +44,12 @@ public final class DocumentFieldMappers implements Iterable { analyzers.put(key, value); } - public DocumentFieldMappers(Collection mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) { - Map fieldMappers = new HashMap<>(); + public DocumentFieldMappers(Collection mappers, + Collection aliasMappers, + Analyzer defaultIndex, + Analyzer defaultSearch, + Analyzer defaultSearchQuote) { + Map fieldMappers = new HashMap<>(); Map 
indexAnalyzers = new HashMap<>(); Map searchAnalyzers = new HashMap<>(); Map searchQuoteAnalyzers = new HashMap<>(); @@ -56,14 +60,24 @@ public final class DocumentFieldMappers implements Iterable { put(searchAnalyzers, fieldType.name(), fieldType.searchAnalyzer(), defaultSearch); put(searchQuoteAnalyzers, fieldType.name(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote); } + + for (FieldAliasMapper aliasMapper : aliasMappers) { + fieldMappers.put(aliasMapper.name(), aliasMapper); + } + this.fieldMappers = Collections.unmodifiableMap(fieldMappers); this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers); this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers); this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers); } - /** Returns the mapper for the given field */ - public FieldMapper getMapper(String field) { + /** + * Returns the leaf mapper associated with this field name. Note that the returned mapper + * could be either a concrete {@link FieldMapper}, or a {@link FieldAliasMapper}. + * + * To access a field's type information, {@link MapperService#fullName} should be used instead. 
+ */ + public Mapper getMapper(String field) { return fieldMappers.get(field); } @@ -87,7 +101,7 @@ public final class DocumentFieldMappers implements Iterable { return this.searchQuoteAnalyzer; } - public Iterator iterator() { + public Iterator iterator() { return fieldMappers.values().iterator(); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 3ec0f544355..87942260742 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -133,15 +133,18 @@ public class DocumentMapper implements ToXContentFragment { // collect all the mappers for this type List newObjectMappers = new ArrayList<>(); List newFieldMappers = new ArrayList<>(); + List newFieldAliasMappers = new ArrayList<>(); for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) { if (metadataMapper instanceof FieldMapper) { newFieldMappers.add(metadataMapper); } } - MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers); + MapperUtils.collect(this.mapping.root, + newObjectMappers, newFieldMappers, newFieldAliasMappers); final IndexAnalyzers indexAnalyzers = mapperService.getIndexAnalyzers(); this.fieldMappers = new DocumentFieldMappers(newFieldMappers, + newFieldAliasMappers, indexAnalyzers.getDefaultIndexAnalyzer(), indexAnalyzers.getDefaultSearchAnalyzer(), indexAnalyzers.getDefaultSearchQuoteAnalyzer()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 61ff4a4ff3d..0fd156c0905 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -459,13 +459,18 @@ final class DocumentParser { private static void 
parseObjectOrField(ParseContext context, Mapper mapper) throws IOException { if (mapper instanceof ObjectMapper) { parseObjectOrNested(context, (ObjectMapper) mapper); - } else { - FieldMapper fieldMapper = (FieldMapper)mapper; + } else if (mapper instanceof FieldMapper) { + FieldMapper fieldMapper = (FieldMapper) mapper; Mapper update = fieldMapper.parse(context); if (update != null) { context.addDynamicMapper(update); } parseCopyFields(context, fieldMapper.copyTo().copyToFields()); + } else if (mapper instanceof FieldAliasMapper) { + throw new IllegalArgumentException("Cannot write to a field alias [" + mapper.name() + "]."); + } else { + throw new IllegalStateException("The provided mapper [" + mapper.name() + "] has an unrecognized type [" + + mapper.getClass().getSimpleName() + "]."); } } @@ -827,9 +832,16 @@ final class DocumentParser { /** Creates an copy of the current field with given field name and boost */ private static void parseCopy(String field, ParseContext context) throws IOException { - FieldMapper fieldMapper = context.docMapper().mappers().getMapper(field); - if (fieldMapper != null) { - fieldMapper.parse(context); + Mapper mapper = context.docMapper().mappers().getMapper(field); + if (mapper != null) { + if (mapper instanceof FieldMapper) { + ((FieldMapper) mapper).parse(context); + } else if (mapper instanceof FieldAliasMapper) { + throw new IllegalArgumentException("Cannot copy to a field alias [" + mapper.name() + "]."); + } else { + throw new IllegalStateException("The provided mapper [" + mapper.name() + + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."); + } } else { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); @@ -837,8 +849,8 @@ final class DocumentParser { final String[] paths = splitAndValidatePath(field); final String fieldName = paths[paths.length-1]; Tuple parentMapperTuple = 
getDynamicParentMapper(context, paths, null); - ObjectMapper mapper = parentMapperTuple.v2(); - parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); + ObjectMapper objectMapper = parentMapperTuple.v2(); + parseDynamicValue(context, objectMapper, fieldName, context.parser().currentToken()); for (int i = 0; i < parentMapperTuple.v1(); i++) { context.path().remove(); } @@ -849,46 +861,46 @@ final class DocumentParser { ObjectMapper currentParent) { ObjectMapper mapper = currentParent == null ? context.root() : currentParent; int pathsAdded = 0; - ObjectMapper parent = mapper; - for (int i = 0; i < paths.length-1; i++) { - String currentPath = context.path().pathAsText(paths[i]); - FieldMapper existingFieldMapper = context.docMapper().mappers().getMapper(currentPath); - if (existingFieldMapper != null) { - throw new MapperParsingException( - "Could not dynamically add mapping for field [{}]. Existing mapping for [{}] must be of type object but found [{}].", - null, String.join(".", paths), currentPath, existingFieldMapper.fieldType.typeName()); - } - mapper = context.docMapper().objectMappers().get(currentPath); - if (mapper == null) { - // One mapping is missing, check if we are allowed to create a dynamic one. - ObjectMapper.Dynamic dynamic = dynamicOrDefault(parent, context); + ObjectMapper parent = mapper; + for (int i = 0; i < paths.length-1; i++) { + String currentPath = context.path().pathAsText(paths[i]); + Mapper existingFieldMapper = context.docMapper().mappers().getMapper(currentPath); + if (existingFieldMapper != null) { + throw new MapperParsingException( + "Could not dynamically add mapping for field [{}]. Existing mapping for [{}] must be of type object but found [{}].", + null, String.join(".", paths), currentPath, existingFieldMapper.typeName()); + } + mapper = context.docMapper().objectMappers().get(currentPath); + if (mapper == null) { + // One mapping is missing, check if we are allowed to create a dynamic one. 
+ ObjectMapper.Dynamic dynamic = dynamicOrDefault(parent, context); - switch (dynamic) { - case STRICT: - throw new StrictDynamicMappingException(parent.fullPath(), paths[i]); - case TRUE: - Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], XContentFieldType.OBJECT); - if (builder == null) { - builder = new ObjectMapper.Builder(paths[i]).enabled(true); - } - Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); - mapper = (ObjectMapper) builder.build(builderContext); - if (mapper.nested() != ObjectMapper.Nested.NO) { - throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) - + "]) through `copy_to` or dots in field names"); - } - context.addDynamicMapper(mapper); - break; - case FALSE: - // Should not dynamically create any more mappers so return the last mapper - return new Tuple<>(pathsAdded, parent); + switch (dynamic) { + case STRICT: + throw new StrictDynamicMappingException(parent.fullPath(), paths[i]); + case TRUE: + Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], XContentFieldType.OBJECT); + if (builder == null) { + builder = new ObjectMapper.Builder(paths[i]).enabled(true); + } + Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); + mapper = (ObjectMapper) builder.build(builderContext); + if (mapper.nested() != ObjectMapper.Nested.NO) { + throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + + "]) through `copy_to` or dots in field names"); + } + context.addDynamicMapper(mapper); + break; + case FALSE: + // Should not dynamically create any more mappers so return the last mapper + return new Tuple<>(pathsAdded, parent); - } } - context.path().add(paths[i]); - pathsAdded++; - parent = mapper; } + context.path().add(paths[i]); + pathsAdded++; + parent = 
mapper; + } return new Tuple<>(pathsAdded, mapper); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java new file mode 100644 index 00000000000..8d87b4f73ec --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; + +/** + * A mapper for field aliases. + * + * A field alias has no concrete field mappings of its own, but instead points to another field by + * its path. Once defined, an alias can be used in place of the concrete field name in search requests. 
+ */ +public final class FieldAliasMapper extends Mapper { + public static final String CONTENT_TYPE = "alias"; + + public static class Names { + public static final String PATH = "path"; + } + + private final String name; + private final String path; + + public FieldAliasMapper(String simpleName, + String name, + String path) { + super(simpleName); + this.name = name; + this.path = path; + } + + @Override + public String name() { + return name; + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + public String path() { + return path; + } + + @Override + public Mapper merge(Mapper mergeWith) { + if (!(mergeWith instanceof FieldAliasMapper)) { + throw new IllegalArgumentException("Cannot merge a field alias mapping [" + + name() + "] with a mapping that is not for a field alias."); + } + return mergeWith; + } + + @Override + public Mapper updateFieldType(Map fullNameToFieldType) { + return this; + } + + @Override + public Iterator iterator() { + return Collections.emptyIterator(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject(simpleName()) + .field("type", CONTENT_TYPE) + .field(Names.PATH, path) + .endObject(); + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { + FieldAliasMapper.Builder builder = new FieldAliasMapper.Builder(name); + Object pathField = node.remove(Names.PATH); + String path = XContentMapValues.nodeStringValue(pathField, null); + if (path == null) { + throw new MapperParsingException("The [path] property must be specified for field [" + name + "]."); + } + return builder.path(path); + } + } + + public static class Builder extends Mapper.Builder { + private String name; + private String path; + + protected Builder(String name) { + super(name); + this.name = name; + } + + public String 
name() { + return this.name; + } + + public Builder path(String path) { + this.path = path; + return this; + } + + public FieldAliasMapper build(BuilderContext context) { + String fullName = context.path().pathAsText(name); + return new FieldAliasMapper(name, fullName, path); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 977b930c41e..cbb008c9d00 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -247,6 +247,11 @@ public abstract class FieldMapper extends Mapper implements Cloneable { return fieldType().name(); } + @Override + public String typeName() { + return fieldType.typeName(); + } + public MappedFieldType fieldType() { return fieldType; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 069468ddb7a..c7d92e9f829 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -35,64 +35,115 @@ import java.util.Set; */ class FieldTypeLookup implements Iterable { - /** Full field name to field type */ final CopyOnWriteHashMap fullNameToFieldType; + private final CopyOnWriteHashMap aliasToConcreteName; - /** Create a new empty instance. 
*/ FieldTypeLookup() { fullNameToFieldType = new CopyOnWriteHashMap<>(); + aliasToConcreteName = new CopyOnWriteHashMap<>(); } - private FieldTypeLookup(CopyOnWriteHashMap fullName) { - this.fullNameToFieldType = fullName; + private FieldTypeLookup(CopyOnWriteHashMap fullNameToFieldType, + CopyOnWriteHashMap aliasToConcreteName) { + this.fullNameToFieldType = fullNameToFieldType; + this.aliasToConcreteName = aliasToConcreteName; } /** * Return a new instance that contains the union of this instance and the field types - * from the provided fields. If a field already exists, the field type will be updated - * to use the new mappers field type. + * from the provided mappers. If a field already exists, its field type will be updated + * to use the new type from the given field mapper. Similarly if an alias already + * exists, it will be updated to reference the field type from the new mapper. */ - public FieldTypeLookup copyAndAddAll(String type, Collection fieldMappers) { + public FieldTypeLookup copyAndAddAll(String type, + Collection fieldMappers, + Collection fieldAliasMappers) { Objects.requireNonNull(type, "type must not be null"); if (MapperService.DEFAULT_MAPPING.equals(type)) { throw new IllegalArgumentException("Default mappings should not be added to the lookup"); } CopyOnWriteHashMap fullName = this.fullNameToFieldType; + CopyOnWriteHashMap aliases = this.aliasToConcreteName; for (FieldMapper fieldMapper : fieldMappers) { MappedFieldType fieldType = fieldMapper.fieldType(); MappedFieldType fullNameFieldType = fullName.get(fieldType.name()); - if (fullNameFieldType == null) { - // introduction of a new field - fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType()); - } else { - // modification of an existing field - checkCompatibility(fullNameFieldType, fieldType); - if (fieldType.equals(fullNameFieldType) == false) { - fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType()); - } + if (!Objects.equals(fieldType, 
fullNameFieldType)) { + validateField(fullNameFieldType, fieldType, aliases); + fullName = fullName.copyAndPut(fieldType.name(), fieldType); } } - return new FieldTypeLookup(fullName); + + for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) { + String aliasName = fieldAliasMapper.name(); + String path = fieldAliasMapper.path(); + + validateAlias(aliasName, path, aliases, fullName); + aliases = aliases.copyAndPut(aliasName, path); + } + + return new FieldTypeLookup(fullName, aliases); } /** - * Checks if the given field type is compatible with an existing field type. - * An IllegalArgumentException is thrown in case of incompatibility. + * Checks that the new field type is valid. */ - private void checkCompatibility(MappedFieldType existingFieldType, MappedFieldType newFieldType) { - List conflicts = new ArrayList<>(); - existingFieldType.checkCompatibility(newFieldType, conflicts); - if (conflicts.isEmpty() == false) { - throw new IllegalArgumentException("Mapper for [" + newFieldType.name() + "] conflicts with existing mapping:\n" + conflicts.toString()); + private void validateField(MappedFieldType existingFieldType, + MappedFieldType newFieldType, + CopyOnWriteHashMap aliasToConcreteName) { + String fieldName = newFieldType.name(); + if (aliasToConcreteName.containsKey(fieldName)) { + throw new IllegalArgumentException("The name for field [" + fieldName + "] has already" + + " been used to define a field alias."); + } + + if (existingFieldType != null) { + List conflicts = new ArrayList<>(); + existingFieldType.checkCompatibility(newFieldType, conflicts); + if (conflicts.isEmpty() == false) { + throw new IllegalArgumentException("Mapper for [" + fieldName + + "] conflicts with existing mapping:\n" + conflicts.toString()); + } + } + } + + /** + * Checks that the new field alias is valid. + * + * Note that this method assumes that new concrete fields have already been processed, so that it + * can verify that an alias refers to an existing concrete field. 
+ */ + private void validateAlias(String aliasName, + String path, + CopyOnWriteHashMap aliasToConcreteName, + CopyOnWriteHashMap fullNameToFieldType) { + if (fullNameToFieldType.containsKey(aliasName)) { + throw new IllegalArgumentException("The name for field alias [" + aliasName + "] has already" + + " been used to define a concrete field."); + } + + if (path.equals(aliasName)) { + throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias cannot refer to itself."); + } + + if (aliasToConcreteName.containsKey(path)) { + throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias cannot refer to another alias."); + } + + if (!fullNameToFieldType.containsKey(path)) { + throw new IllegalArgumentException("Invalid [path] value [" + path + "] for field alias [" + + aliasName + "]: an alias must refer to an existing field in the mappings."); } } /** Returns the field for the given field */ public MappedFieldType get(String field) { - return fullNameToFieldType.get(field); + String concreteField = aliasToConcreteName.getOrDefault(field, field); + return fullNameToFieldType.get(concreteField); } /** @@ -105,6 +156,11 @@ class FieldTypeLookup implements Iterable { fields.add(fieldType.name()); } } + for (String aliasName : aliasToConcreteName.keySet()) { + if (Regex.simpleMatch(pattern, aliasName)) { + fields.add(aliasName); + } + } return fields; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 051ac9da7f2..4d17afae614 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -173,6 +173,11 @@ public abstract class Mapper implements ToXContentFragment, Iterable { /** Returns the canonical name which uniquely identifies the mapper against other mappers in a 
type. */ public abstract String name(); + /** + * Returns a name representing the type of this mapper. + */ + public abstract String typeName(); + /** Return the merge of {@code mergeWith} into this. * Both {@code this} and {@code mergeWith} will be left unmodified. */ public abstract Mapper merge(Mapper mergeWith); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java new file mode 100644 index 00000000000..440be98ad9e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Stream; + +/** + * A utility class that helps validate certain aspects of a mappings update.
+ */ +class MapperMergeValidator { + + /** + * Validates the overall structure of the mapping addition, including whether + * duplicate fields are present, and if the provided fields have already been + * defined with a different data type. + * + * @param type The mapping type, for use in error messages. + * @param objectMappers The newly added object mappers. + * @param fieldMappers The newly added field mappers. + * @param fieldAliasMappers The newly added field alias mappers. + * @param fullPathObjectMappers All object mappers, indexed by their full path. + * @param fieldTypes All field and field alias mappers, collected into a lookup structure. + */ + public static void validateMapperStructure(String type, + Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + checkFieldUniqueness(type, objectMappers, fieldMappers, + fieldAliasMappers, fullPathObjectMappers, fieldTypes); + checkObjectsCompatibility(objectMappers, fullPathObjectMappers); + } + + private static void checkFieldUniqueness(String type, + Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + + // first check within mapping + Set objectFullNames = new HashSet<>(); + for (ObjectMapper objectMapper : objectMappers) { + String fullPath = objectMapper.fullPath(); + if (objectFullNames.add(fullPath) == false) { + throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); + } + } + + Set fieldNames = new HashSet<>(); + Stream.concat(fieldMappers.stream(), fieldAliasMappers.stream()) + .forEach(mapper -> { + String name = mapper.name(); + if (objectFullNames.contains(name)) { + throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); + } else if (fieldNames.add(name) == false) { + throw new 
IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); + } + }); + + // then check other types + for (String fieldName : fieldNames) { + if (fullPathObjectMappers.containsKey(fieldName)) { + throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type + + "] but this name is already used for an object in other types"); + } + } + + for (String objectPath : objectFullNames) { + if (fieldTypes.get(objectPath) != null) { + throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type + + "] but this name is already used for a field in other types"); + } + } + } + + private static void checkObjectsCompatibility(Collection objectMappers, + Map fullPathObjectMappers) { + for (ObjectMapper newObjectMapper : objectMappers) { + ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); + if (existingObjectMapper != null) { + // simulate a merge and ignore the result, we are just interested + // in exceptions here + existingObjectMapper.merge(newObjectMapper); + } + } + } + + /** + * Verifies that each field reference, e.g. the value of copy_to or the target + * of a field alias, corresponds to a valid part of the mapping. + * + * @param fieldMappers The newly added field mappers. + * @param fieldAliasMappers The newly added field alias mappers. + * @param fullPathObjectMappers All object mappers, indexed by their full path. + * @param fieldTypes All field and field alias mappers, collected into a lookup structure. 
+ */ + public static void validateFieldReferences(List fieldMappers, + List fieldAliasMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + validateCopyTo(fieldMappers, fullPathObjectMappers, fieldTypes); + validateFieldAliasTargets(fieldAliasMappers, fullPathObjectMappers); + } + + private static void validateCopyTo(List fieldMappers, + Map fullPathObjectMappers, + FieldTypeLookup fieldTypes) { + for (FieldMapper mapper : fieldMappers) { + if (mapper.copyTo() != null && mapper.copyTo().copyToFields().isEmpty() == false) { + String sourceParent = parentObject(mapper.name()); + if (sourceParent != null && fieldTypes.get(sourceParent) != null) { + throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + mapper.name() + "]"); + } + + final String sourceScope = getNestedScope(mapper.name(), fullPathObjectMappers); + for (String copyTo : mapper.copyTo().copyToFields()) { + String copyToParent = parentObject(copyTo); + if (copyToParent != null && fieldTypes.get(copyToParent) != null) { + throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]"); + } + + if (fullPathObjectMappers.containsKey(copyTo)) { + throw new IllegalArgumentException("Cannot copy to field [" + copyTo + "] since it is mapped as an object"); + } + + final String targetScope = getNestedScope(copyTo, fullPathObjectMappers); + checkNestedScopeCompatibility(sourceScope, targetScope); + } + } + } + } + + private static void validateFieldAliasTargets(List fieldAliasMappers, + Map fullPathObjectMappers) { + for (FieldAliasMapper mapper : fieldAliasMappers) { + String aliasName = mapper.name(); + String path = mapper.path(); + + String aliasScope = getNestedScope(aliasName, fullPathObjectMappers); + String pathScope = getNestedScope(path, fullPathObjectMappers); + + if (!Objects.equals(aliasScope, pathScope)) { + StringBuilder message = new StringBuilder("Invalid [path] value [" + path + "] for field alias 
[" + + aliasName + "]: an alias must have the same nested scope as its target. "); + message.append(aliasScope == null + ? "The alias is not nested" + : "The alias's nested scope is [" + aliasScope + "]"); + message.append(", but "); + message.append(pathScope == null + ? "the target is not nested." + : "the target's nested scope is [" + pathScope + "]."); + throw new IllegalArgumentException(message.toString()); + } + } + } + + private static String getNestedScope(String path, Map fullPathObjectMappers) { + for (String parentPath = parentObject(path); parentPath != null; parentPath = parentObject(parentPath)) { + ObjectMapper objectMapper = fullPathObjectMappers.get(parentPath); + if (objectMapper != null && objectMapper.nested().isNested()) { + return parentPath; + } + } + return null; + } + + private static void checkNestedScopeCompatibility(String source, String target) { + boolean targetIsParentOfSource; + if (source == null || target == null) { + targetIsParentOfSource = target == null; + } else { + targetIsParentOfSource = source.equals(target) || source.startsWith(target + "."); + } + if (targetIsParentOfSource == false) { + throw new IllegalArgumentException( + "Illegal combination of [copy_to] and [nested] mappings: [copy_to] may only copy data to the current nested " + + "document or any of its parents, however one [copy_to] directive is trying to copy data from nested object [" + + source + "] to [" + target + "]"); + } + } + + private static String parentObject(String field) { + int lastDot = field.lastIndexOf('.'); + if (lastDot == -1) { + return null; + } + return field.substring(0, lastDot); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8988238d927..936e7334002 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -21,7 
+21,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -395,15 +394,17 @@ public class MapperService extends AbstractIndexComponent implements Closeable { // check basic sanity of the new mapping List objectMappers = new ArrayList<>(); List fieldMappers = new ArrayList<>(); + List fieldAliasMappers = new ArrayList<>(); Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers); - MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers); - checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers, fullPathObjectMappers, fieldTypes); - checkObjectsCompatibility(objectMappers, fullPathObjectMappers); + MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers, fieldAliasMappers); + + MapperMergeValidator.validateMapperStructure(newMapper.type(), objectMappers, fieldMappers, + fieldAliasMappers, fullPathObjectMappers, fieldTypes); checkPartitionedIndexConstraints(newMapper); // update lookup data-structures // this will in particular make sure that the merged fields are compatible with other types - fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers); + fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, fieldAliasMappers); for (ObjectMapper objectMapper : objectMappers) { if (fullPathObjectMappers == this.fullPathObjectMappers) { @@ -417,7 +418,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } - validateCopyTo(fieldMappers, fullPathObjectMappers, fieldTypes); + MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers, + fullPathObjectMappers, fieldTypes); if (reason == MergeReason.MAPPING_UPDATE) { // this check will only be performed on the master node when 
there is @@ -482,7 +484,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { if (mapper != null) { List fieldMappers = new ArrayList<>(); Collections.addAll(fieldMappers, mapper.mapping().metadataMappers); - MapperUtils.collect(mapper.root(), new ArrayList<>(), fieldMappers); + MapperUtils.collect(mapper.root(), new ArrayList<>(), fieldMappers, new ArrayList<>()); for (FieldMapper fieldMapper : fieldMappers) { assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name(); } @@ -503,56 +505,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return true; } - private static void checkFieldUniqueness(String type, Collection objectMappers, Collection fieldMappers, - Map fullPathObjectMappers, FieldTypeLookup fieldTypes) { - - // first check within mapping - final Set objectFullNames = new HashSet<>(); - for (ObjectMapper objectMapper : objectMappers) { - final String fullPath = objectMapper.fullPath(); - if (objectFullNames.add(fullPath) == false) { - throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); - } - } - - final Set fieldNames = new HashSet<>(); - for (FieldMapper fieldMapper : fieldMappers) { - final String name = fieldMapper.name(); - if (objectFullNames.contains(name)) { - throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); - } else if (fieldNames.add(name) == false) { - throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); - } - } - - // then check other types - for (String fieldName : fieldNames) { - if (fullPathObjectMappers.containsKey(fieldName)) { - throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type - + "] but this name is already used for an object in other types"); - } - } - - for (String objectPath : objectFullNames) { - if 
(fieldTypes.get(objectPath) != null) { - throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type - + "] but this name is already used for a field in other types"); - } - } - } - - private static void checkObjectsCompatibility(Collection objectMappers, - Map fullPathObjectMappers) { - for (ObjectMapper newObjectMapper : objectMappers) { - ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); - if (existingObjectMapper != null) { - // simulate a merge and ignore the result, we are just interested - // in exceptions here - existingObjectMapper.merge(newObjectMapper); - } - } - } - private void checkNestedFieldsLimit(Map fullPathObjectMappers) { long allowedNestedFields = indexSettings.getValue(INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING); long actualNestedFields = 0; @@ -609,66 +561,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } - private static void validateCopyTo(List fieldMappers, Map fullPathObjectMappers, - FieldTypeLookup fieldTypes) { - for (FieldMapper mapper : fieldMappers) { - if (mapper.copyTo() != null && mapper.copyTo().copyToFields().isEmpty() == false) { - String sourceParent = parentObject(mapper.name()); - if (sourceParent != null && fieldTypes.get(sourceParent) != null) { - throw new IllegalArgumentException("[copy_to] may not be used to copy from a multi-field: [" + mapper.name() + "]"); - } - - final String sourceScope = getNestedScope(mapper.name(), fullPathObjectMappers); - for (String copyTo : mapper.copyTo().copyToFields()) { - String copyToParent = parentObject(copyTo); - if (copyToParent != null && fieldTypes.get(copyToParent) != null) { - throw new IllegalArgumentException("[copy_to] may not be used to copy to a multi-field: [" + copyTo + "]"); - } - - if (fullPathObjectMappers.containsKey(copyTo)) { - throw new IllegalArgumentException("Cannot copy to field [" + copyTo + "] since it is mapped as an object"); - } - - 
final String targetScope = getNestedScope(copyTo, fullPathObjectMappers); - checkNestedScopeCompatibility(sourceScope, targetScope); - } - } - } - } - - private static String getNestedScope(String path, Map fullPathObjectMappers) { - for (String parentPath = parentObject(path); parentPath != null; parentPath = parentObject(parentPath)) { - ObjectMapper objectMapper = fullPathObjectMappers.get(parentPath); - if (objectMapper != null && objectMapper.nested().isNested()) { - return parentPath; - } - } - return null; - } - - private static void checkNestedScopeCompatibility(String source, String target) { - boolean targetIsParentOfSource; - if (source == null || target == null) { - targetIsParentOfSource = target == null; - } else { - targetIsParentOfSource = source.equals(target) || source.startsWith(target + "."); - } - if (targetIsParentOfSource == false) { - throw new IllegalArgumentException( - "Illegal combination of [copy_to] and [nested] mappings: [copy_to] may only copy data to the current nested " + - "document or any of its parents, however one [copy_to] directive is trying to copy data from nested object [" + - source + "] to [" + target + "]"); - } - } - - private static String parentObject(String field) { - int lastDot = field.lastIndexOf('.'); - if (lastDot == -1) { - return null; - } - return field.substring(0, lastDot); - } - public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); } @@ -729,6 +621,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return fieldTypes.simpleMatchToFullName(pattern); } + /** + * Returns all mapped field types. 
+ */ + public Iterable fieldTypes() { + return fieldTypes; + } + public ObjectMapper getObjectMapper(String name) { return fullPathObjectMappers.get(name); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java index ad57d72b345..70da6b73f31 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java @@ -24,17 +24,28 @@ import java.util.Collection; enum MapperUtils { ; - /** Split mapper and its descendants into object and field mappers. */ - public static void collect(Mapper mapper, Collection objectMappers, Collection fieldMappers) { + /** + * Splits the provided mapper and its descendants into object, field, and field alias mappers. + */ + public static void collect(Mapper mapper, Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers) { if (mapper instanceof RootObjectMapper) { // root mapper isn't really an object mapper } else if (mapper instanceof ObjectMapper) { objectMappers.add((ObjectMapper)mapper); } else if (mapper instanceof FieldMapper) { fieldMappers.add((FieldMapper)mapper); + } else if (mapper instanceof FieldAliasMapper) { + fieldAliasMappers.add((FieldAliasMapper) mapper); + } else { + throw new IllegalStateException("Unrecognized mapper type [" + + mapper.getClass().getSimpleName() + "]."); } + + for (Mapper child : mapper) { - collect(child, objectMappers, fieldMappers); + collect(child, objectMappers, fieldMappers, fieldAliasMappers); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index c3e3e41798d..99ad3936700 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -359,6 +359,11 @@ public class 
ObjectMapper extends Mapper implements Cloneable { return this.fullPath; } + @Override + public String typeName() { + return CONTENT_TYPE; + } + public boolean isEnabled() { return this.enabled; } diff --git a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index 280df7cfa6a..7a2373e5ad8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -149,7 +149,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder } if (context.indexVersionCreated().before(Version.V_6_1_0)) { - return newLegacyExistsQuery(fields); + return newLegacyExistsQuery(context, fields); } if (fields.size() == 1) { @@ -164,22 +164,28 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder return new ConstantScoreQuery(boolFilterBuilder.build()); } - private static Query newLegacyExistsQuery(Collection fields) { + private static Query newLegacyExistsQuery(QueryShardContext context, Collection fields) { // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery() // so we don't end up with deprecation warnings if (fields.size() == 1) { - Query filter = new TermQuery(new Term(FieldNamesFieldMapper.NAME, fields.iterator().next())); + Query filter = newLegacyExistsQuery(context, fields.iterator().next()); return new ConstantScoreQuery(filter); } BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); for (String field : fields) { - Query filter = new TermQuery(new Term(FieldNamesFieldMapper.NAME, field)); + Query filter = newLegacyExistsQuery(context, field); boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); } return new ConstantScoreQuery(boolFilterBuilder.build()); } + private static Query newLegacyExistsQuery(QueryShardContext context, String field) { + MappedFieldType fieldType = context.fieldMapper(field); + 
String fieldName = fieldType != null ? fieldType.name() : field; + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName)); + } + private static Query newFieldExistsQuery(QueryShardContext context, String field) { MappedFieldType fieldType = context.getMapperService().fullName(field); if (fieldType == null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 1f410a2564c..637d9321291 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; @@ -202,14 +203,18 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder(prefixQuery); } else { - String origFieldName = ((PrefixQueryBuilder) multiTermQueryBuilder).fieldName(); - SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); /** * Prefixes are indexed in a different field so we mask the term query with the original field * name. This is required because span_near and span_or queries don't work across different field. * The masking is safe because the prefix field is indexed using the same content than the original field * and the prefix analyzer preserves positions. 
*/ - spanQuery = new FieldMaskingSpanQuery(spanTermQuery, origFieldName); + SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); + spanQuery = new FieldMaskingSpanQuery(spanTermQuery, fieldName); } } else { if (subQuery instanceof MultiTermQuery == false) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index d4333fa0bc5..ceeef6112ae 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; import java.util.ArrayList; @@ -218,7 +219,8 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder queries = createTermQueries(context); + LongValuesSource longValuesSource = createValuesSource(context); + return new CoveringQuery(queries, longValuesSource); + } + + /** + * Visible only for testing purposes. 
+ */ + List createTermQueries(QueryShardContext context) { final MappedFieldType fieldType = context.fieldMapper(fieldName); final List queries = new ArrayList<>(values.size()); for (Object value : values) { @@ -239,7 +248,11 @@ public final class TermsSetQueryBuilder extends AbstractQueryBuilder params = new HashMap<>(); params.putAll(minimumShouldMatchScript.getParams()); - params.put("num_terms", queries.size()); + params.put("num_terms", values.size()); SearchScript.LeafFactory leafFactory = factory.newFactory(params, context.lookup()); longValuesSource = new ScriptLongValueSource(minimumShouldMatchScript, leafFactory); } else { throw new IllegalStateException("No minimum should match has been specified"); } - return new CoveringQuery(queries, longValuesSource); + return longValuesSource; } static final class ScriptLongValueSource extends LongValuesSource { diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index b3751afbc9c..df96ff87ec2 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -25,6 +25,8 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -88,10 +90,10 @@ public final class QueryParserHelper { * @param mapperService The mapper service where to find the mapping. * @param field The field name to search. 
*/ - public static FieldMapper getFieldMapper(MapperService mapperService, String field) { + public static Mapper getFieldMapper(MapperService mapperService, String field) { DocumentMapper mapper = mapperService.documentMapper(); if (mapper != null) { - FieldMapper fieldMapper = mapper.mappers().getMapper(field); + Mapper fieldMapper = mapper.mappers().getMapper(field); if (fieldMapper != null) { return fieldMapper; } @@ -167,23 +169,27 @@ public final class QueryParserHelper { if (fieldSuffix != null && context.fieldMapper(fieldName + fieldSuffix) != null) { fieldName = fieldName + fieldSuffix; } - FieldMapper mapper = getFieldMapper(context.getMapperService(), fieldName); - if (mapper == null) { - // Unmapped fields are not ignored - fields.put(fieldOrPattern, weight); - continue; - } - if (acceptMetadataField == false && mapper instanceof MetadataFieldMapper) { - // Ignore metadata fields + + MappedFieldType fieldType = context.getMapperService().fullName(fieldName); + if (fieldType == null) { + // Note that we don't ignore unmapped fields. + fields.put(fieldName, weight); continue; } + // Ignore fields that are not in the allowed mapper types. Some // types do not support term queries, and thus we cannot generate // a special query for them. - String mappingType = mapper.fieldType().typeName(); + String mappingType = fieldType.typeName(); if (acceptAllTypes == false && ALLOWED_QUERY_MAPPER_TYPES.contains(mappingType) == false) { continue; } + + // Ignore metadata fields. 
+ Mapper mapper = getFieldMapper(context.getMapperService(), fieldName); + if (acceptMetadataField == false && mapper instanceof MetadataFieldMapper) { + continue; + } fields.put(fieldName, weight); } checkForTooManyFields(fields); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 2d41491e3a7..a1038853c06 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.FieldAliasMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; @@ -129,7 +130,9 @@ public class IndicesModule extends AbstractModule { mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); + mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser()); mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index abdc195b514..df1bd115e2b 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -85,6 +85,12 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); + + if (!config.unmapped()) { + this.fieldType = config.fieldContext().fieldType(); + this.indexedFieldName = fieldType.name(); + } + this.includeExclude = includeExclude; this.executionHint = executionHint; this.filter = filterBuilder == null @@ -98,15 +104,6 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac : searcher.count(filter); this.bucketCountThresholds = bucketCountThresholds; this.significanceHeuristic = significanceHeuristic; - setFieldInfo(context); - - } - - private void setFieldInfo(SearchContext context) { - if (!config.unmapped()) { - this.indexedFieldName = config.fieldContext().field(); - fieldType = context.smartNameFieldType(indexedFieldName); - } } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java index 5e8bc2f4c18..f0b85f979c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregationBuilder.java @@ -343,13 +343,10 @@ public class SignificantTextAggregationBuilder extends AbstractAggregationBuilde protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { 
SignificanceHeuristic executionHeuristic = this.significanceHeuristic.rewrite(context); - String[] execFieldNames = sourceFieldNames; - if (execFieldNames == null) { - execFieldNames = new String[] { fieldName }; - } + return new SignificantTextAggregatorFactory(name, includeExclude, filterBuilder, bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, - fieldName, execFieldNames, filterDuplicateText, metaData); + fieldName, sourceFieldNames, filterDuplicateText, metaData); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java index c35b0bfd2d0..ea9a8a91aea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java @@ -71,12 +71,19 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory metaData) throws IOException { super(name, context, parent, subFactoriesBuilder, metaData); + + // Note that if the field is unmapped (its field type is null), we don't fail, + // and just use the given field name as a placeholder. + this.fieldType = context.getQueryShardContext().fieldMapper(fieldName); + this.indexedFieldName = fieldType != null ? fieldType.name() : fieldName; + this.sourceFieldNames = sourceFieldNames == null + ? new String[] { indexedFieldName } + : sourceFieldNames; + this.includeExclude = includeExclude; this.filter = filterBuilder == null ? 
null : filterBuilder.toQuery(context.getQueryShardContext()); - this.indexedFieldName = fieldName; - this.sourceFieldNames = sourceFieldNames; this.filterDuplicateText = filterDuplicateText; IndexSearcher searcher = context.searcher(); // Important - need to use the doc count that includes deleted docs @@ -86,11 +93,8 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory pipelineAggregators, Map metaData) - throws IOException { + throws IOException { if (collectsFromSingleBucket == false) { return asMultiBucketAggregator(this, context, parent); } - + numberOfAggregatorsCreated++; BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds); if (bucketCountThresholds.getShardSize() == SignificantTextAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { @@ -166,7 +170,7 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory fieldNames = null; - List fieldNamePatterns = null; + Map> storedToRequestedFields = new HashMap<>(); StoredFieldsContext storedFieldsContext = context.storedFieldsContext(); if (storedFieldsContext == null) { @@ -98,39 +96,36 @@ public class FetchPhase implements SearchPhase { // disable stored fields entirely fieldsVisitor = null; } else { - for (String fieldName : context.storedFieldsContext().fieldNames()) { - if (fieldName.equals(SourceFieldMapper.NAME)) { + for (String fieldNameOrPattern : context.storedFieldsContext().fieldNames()) { + if (fieldNameOrPattern.equals(SourceFieldMapper.NAME)) { FetchSourceContext fetchSourceContext = context.hasFetchSourceContext() ? 
context.fetchSourceContext() - : FetchSourceContext.FETCH_SOURCE; + : FetchSourceContext.FETCH_SOURCE; context.fetchSourceContext(new FetchSourceContext(true, fetchSourceContext.includes(), fetchSourceContext.excludes())); continue; } - if (Regex.isSimpleMatchPattern(fieldName)) { - if (fieldNamePatterns == null) { - fieldNamePatterns = new ArrayList<>(); - } - fieldNamePatterns.add(fieldName); - } else { + + Collection fieldNames = context.mapperService().simpleMatchToFullName(fieldNameOrPattern); + for (String fieldName : fieldNames) { MappedFieldType fieldType = context.smartNameFieldType(fieldName); if (fieldType == null) { // Only fail if we know it is a object field, missing paths / fields shouldn't fail. if (context.getObjectMapper(fieldName) != null) { throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field"); } + } else { + String storedField = fieldType.name(); + Set requestedFields = storedToRequestedFields.computeIfAbsent( + storedField, key -> new HashSet<>()); + requestedFields.add(fieldName); } - if (fieldNames == null) { - fieldNames = new HashSet<>(); - } - fieldNames.add(fieldName); } } boolean loadSource = context.sourceRequested(); - if (fieldNames == null && fieldNamePatterns == null) { + if (storedToRequestedFields.isEmpty()) { // empty list specified, default to disable _source if no explicit indication fieldsVisitor = new FieldsVisitor(loadSource); } else { - fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, - fieldNamePatterns == null ? 
Collections.emptyList() : fieldNamePatterns, loadSource); + fieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), loadSource); } } @@ -149,10 +144,11 @@ public class FetchPhase implements SearchPhase { final SearchHit searchHit; int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId); if (rootDocId != -1) { - searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, fieldNames, fieldNamePatterns, - subReaderContext); + searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, + storedToRequestedFields, subReaderContext); } else { - searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext); + searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, + storedToRequestedFields, subReaderContext); } hits[index] = searchHit; @@ -190,21 +186,18 @@ public class FetchPhase implements SearchPhase { return -1; } - private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, + private SearchHit createSearchHit(SearchContext context, + FieldsVisitor fieldsVisitor, + int docId, + int subDocId, + Map> storedToRequestedFields, LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { return new SearchHit(docId); } - loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); - fieldsVisitor.postProcess(context.mapperService()); - Map searchFields = null; - if (!fieldsVisitor.fields().isEmpty()) { - searchFields = new HashMap<>(fieldsVisitor.fields().size()); - for (Map.Entry> entry : fieldsVisitor.fields().entrySet()) { - searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue())); - } - } + Map searchFields = getSearchFields(context, fieldsVisitor, subDocId, + storedToRequestedFields, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; @@ -223,9 +216,40 @@ public class FetchPhase implements 
SearchPhase { return searchHit; } - private SearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, - int rootSubDocId, Set fieldNames, - List fieldNamePatterns, LeafReaderContext subReaderContext) throws IOException { + private Map getSearchFields(SearchContext context, + FieldsVisitor fieldsVisitor, + int subDocId, + Map> storedToRequestedFields, + LeafReaderContext subReaderContext) { + loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); + fieldsVisitor.postProcess(context.mapperService()); + + if (fieldsVisitor.fields().isEmpty()) { + return null; + } + + Map searchFields = new HashMap<>(fieldsVisitor.fields().size()); + for (Map.Entry> entry : fieldsVisitor.fields().entrySet()) { + String storedField = entry.getKey(); + List storedValues = entry.getValue(); + + if (storedToRequestedFields.containsKey(storedField)) { + for (String requestedField : storedToRequestedFields.get(storedField)) { + searchFields.put(requestedField, new DocumentField(requestedField, storedValues)); + } + } else { + searchFields.put(storedField, new DocumentField(storedField, storedValues)); + } + } + return searchFields; + } + + private SearchHit createNestedSearchHit(SearchContext context, + int nestedTopDocId, + int nestedSubDocId, + int rootSubDocId, + Map> storedToRequestedFields, + LeafReaderContext subReaderContext) throws IOException { // Also if highlighting is requested on nested documents we need to fetch the _source from the root document, // otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail, // because the entire _source is only stored with the root document. 
@@ -244,9 +268,13 @@ public class FetchPhase implements SearchPhase { source = null; } + Map searchFields = null; + if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { + FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); + searchFields = getSearchFields(context, nestedFieldsVisitor, nestedSubDocId, + storedToRequestedFields, subReaderContext); + } - Map searchFields = - getSearchFields(context, nestedSubDocId, fieldNames, fieldNamePatterns, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(uid.type()); SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId); @@ -307,26 +335,6 @@ public class FetchPhase implements SearchPhase { return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, searchFields); } - private Map getSearchFields(SearchContext context, int nestedSubDocId, Set fieldNames, - List fieldNamePatterns, LeafReaderContext subReaderContext) { - Map searchFields = null; - if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { - FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames, - fieldNamePatterns == null ? 
Collections.emptyList() : fieldNamePatterns, false); - if (nestedFieldsVisitor != null) { - loadStoredFields(context, subReaderContext, nestedFieldsVisitor, nestedSubDocId); - nestedFieldsVisitor.postProcess(context.mapperService()); - if (!nestedFieldsVisitor.fields().isEmpty()) { - searchFields = new HashMap<>(nestedFieldsVisitor.fields().size()); - for (Map.Entry> entry : nestedFieldsVisitor.fields().entrySet()) { - searchFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue())); - } - } - } - } - return searchFields; - } - private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId, LeafReaderContext subReaderContext, MapperService mapperService, diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index e5ff7abc68b..11e46061d67 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -100,7 +100,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { if (highlightQuery == null) { highlightQuery = context.parsedQuery().query(); } - HighlighterContext highlighterContext = new HighlighterContext(fieldName, + HighlighterContext highlighterContext = new HighlighterContext(fieldType.name(), field, fieldType, context, hitContext, highlightQuery); if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) { @@ -109,7 +109,11 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { } HighlightField highlightField = highlighter.highlight(highlighterContext); if (highlightField != null) { - highlightFields.put(highlightField.name(), highlightField); + // Note that we make sure to use the original field name in the response. 
This is because the + // original field could be an alias, and highlighter implementations may instead reference the + // concrete field it points to. + highlightFields.put(fieldName, + new HighlightField(fieldName, highlightField.fragments())); } } } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index 2e04443f9e5..9ec20001adc 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -148,7 +148,7 @@ public class LeafFieldsLookup implements Map { reader.document(docId, fieldVisitor); fieldVisitor.postProcess(mapperService); List storedFields = fieldVisitor.fields().get(data.fieldType().name()); - data.fields(singletonMap(name, storedFields)); + data.fields(singletonMap(fieldName, storedFields)); } catch (IOException e) { throw new ElasticsearchParseException("failed to load field [{}]", e, name); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index dcdc669539f..9199615868a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -321,7 +320,7 @@ public abstract class SuggestionBuilder> implemen 
suggestionContext.setAnalyzer(luceneAnalyzer); } - suggestionContext.setField(field); + suggestionContext.setField(fieldType.name()); if (size != null) { suggestionContext.setSize(size); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index c4f7d8a5000..48aaf705099 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -29,8 +29,8 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; @@ -138,8 +138,8 @@ public class GeoContextMapping extends ContextMapping { @Override public Set parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException { if (fieldName != null) { - FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName); - if (!(mapper instanceof GeoPointFieldMapper)) { + MappedFieldType fieldType = parseContext.mapperService().fullName(fieldName); + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { throw new ElasticsearchParseException("referenced field must be mapped to geo_point"); } } diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 8c4879fd35e..9aba48f7de5 100644 --- 
a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -21,12 +21,11 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -84,14 +83,14 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get(); - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "text").field("analyzer", analyzerName).endObject().endObject() - .endObject().endObject()); - DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + .endObject().endObject(); + MapperService mapperService = createIndex("test", indexSettings, "type", mapping).mapperService(); - FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); - assertThat(fieldMapper.fieldType().searchAnalyzer(), 
instanceOf(NamedAnalyzer.class)); - NamedAnalyzer fieldMapperNamedAnalyzer = fieldMapper.fieldType().searchAnalyzer(); + MappedFieldType fieldType = mapperService.fullName("field"); + assertThat(fieldType.searchAnalyzer(), instanceOf(NamedAnalyzer.class)); + NamedAnalyzer fieldMapperNamedAnalyzer = fieldType.searchAnalyzer(); assertThat(fieldMapperNamedAnalyzer.analyzer(), is(namedAnalyzer.analyzer())); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index 6e9cb6c0b59..5f4cd98600b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; @@ -49,32 +51,32 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase { } public void testDefaultMapping() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + MapperService mapperService = createIndex("test", 
Settings.EMPTY, "type", mapping).mapperService(); + MappedFieldType fieldType = mapperService.fullName("field"); - FieldMapper fieldMapper = mapper.mappers().getMapper("field"); - assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class)); - assertThat(fieldMapper.fieldType().stored(), equalTo(false)); + assertThat(fieldType, instanceOf(BinaryFieldMapper.BinaryFieldType.class)); + assertThat(fieldType.stored(), equalTo(false)); } public void testStoredValue() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field") .field("type", "binary") .field("store", true) .endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping).mapperService(); // case 1: a simple binary value final byte[] binaryValue1 = new byte[100]; @@ -89,13 +91,14 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase { assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "id", + ParsedDocument doc = mapperService.documentMapper().parse(SourceToParse.source("test", "type", "id", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); assertEquals(new BytesRef(value), indexedValue); - FieldMapper fieldMapper = mapper.mappers().getMapper("field"); - Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue); + + MappedFieldType fieldType = 
mapperService.fullName("field"); + Object originalValue = fieldType.valueForDisplay(indexedValue); assertEquals(new BytesArray(value), originalValue); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index 44ecb24b672..8638e16e29a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -52,7 +52,6 @@ import static org.hamcrest.Matchers.containsString; public class BooleanFieldMapperTests extends ESSingleNodeTestCase { private IndexService indexService; private DocumentMapperParser parser; - private DocumentMapperParser preEs6Parser; @Before public void setup() { @@ -101,7 +100,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().getMapper("field"); + Mapper mapper = defaultMapper.mappers().getMapper("field"); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); mapper.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 1381b6e9205..a01ddccc939 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -61,10 +62,9 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); - - MappedFieldType completionFieldType = fieldMapper.fieldType(); + MappedFieldType completionFieldType = ((CompletionFieldMapper) fieldMapper).fieldType(); NamedAnalyzer indexAnalyzer = completionFieldType.indexAnalyzer(); assertThat(indexAnalyzer.name(), equalTo("simple")); @@ -94,10 +94,9 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); - - MappedFieldType completionFieldType = fieldMapper.fieldType(); + MappedFieldType completionFieldType = ((CompletionFieldMapper) fieldMapper).fieldType(); NamedAnalyzer indexAnalyzer = completionFieldType.indexAnalyzer(); assertThat(indexAnalyzer.name(), equalTo("simple")); @@ -129,12 +128,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); 
assertThat(fieldMapper, instanceOf(CompletionFieldMapper.class)); - CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; XContentBuilder builder = jsonBuilder().startObject(); - completionFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); + fieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); builder.close(); Map serializedMap = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)).map(); Map configMap = (Map) serializedMap.get("completion"); @@ -153,15 +151,15 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", "suggestion") .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 1); } @@ -192,15 +190,15 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = 
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .array("completion", "suggestion1", "suggestion2") .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 2); } @@ -212,8 +210,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -223,7 +221,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 1); } @@ -235,8 +233,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference 
.bytes(XContentFactory.jsonBuilder() .startObject() @@ -246,10 +244,50 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 3); } + public void testParsingWithGeoFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type1") + .startObject("properties") + .startObject("completion") + .field("type", "completion") + .startObject("contexts") + .field("name", "location") + .field("type", "geo") + .field("path", "alias") + .endObject() + .endObject() + .startObject("birth-place") + .field("type", "geo_point") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "birth-place") + .endObject() + .endObject() + .endObject() + .endObject(); + + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("completion"); + + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("completion") + .field("input", "suggestion") + .startObject("contexts") + .field("location", "37.77,-122.42") + .endObject() + .endObject() + .endObject()), XContentType.JSON)); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); + assertSuggestFields(fields, 1); + } + public void testParsingFull() throws Exception { String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") @@ -258,8 +296,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { 
.endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -279,7 +317,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 3); } @@ -291,8 +329,8 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() @@ -312,7 +350,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertSuggestFields(fields, 6); } @@ -420,7 +458,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); 
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType().prefixQuery(new BytesRef("co")); assertThat(prefixQuery, instanceOf(PrefixCompletionQuery.class)); @@ -434,7 +472,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co", Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS), FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX, @@ -451,7 +489,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper; Query prefixQuery = completionFieldMapper.fieldType() .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index 
eff6222e6c6..5eb102208eb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -72,7 +72,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("type1"); - FieldMapper fieldMapper = docMapper.mappers().getMapper("copy_test"); + Mapper fieldMapper = docMapper.mappers().getMapper("copy_test"); // Check json serialization TextFieldMapper stringFieldMapper = (TextFieldMapper) fieldMapper; @@ -123,7 +123,7 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { docMapper = index.mapperService().documentMapper("type1"); fieldMapper = docMapper.mappers().getMapper("new_field"); - assertThat(fieldMapper.fieldType().typeName(), equalTo("long")); + assertThat(fieldMapper.typeName(), equalTo("long")); } public void testCopyToFieldsInnerObjectParsing() throws Exception { @@ -308,13 +308,15 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE); + FieldMapper fieldMapperBefore = (FieldMapper) docMapperBefore.mappers().getMapper("copy_test"); - assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); + assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields()); DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), MapperService.MergeReason.MAPPING_UPDATE); + FieldMapper fieldMapperAfter = (FieldMapper) docMapperAfter.mappers().getMapper("copy_test"); - 
assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields()); - assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); + assertEquals(Arrays.asList("baz", "bar"), fieldMapperAfter.copyTo().copyToFields()); + assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields()); } public void testCopyToNestedField() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index c19965ac5f7..51b27094099 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -382,11 +382,11 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties") .startObject("release_date").field("type", "date").field("format", "yyyy/MM/dd").endObject() .endObject().endObject().endObject()); - DocumentMapper initMapper = indexService.mapperService().merge("movie", new CompressedXContent(initMapping), + indexService.mapperService().merge("movie", new CompressedXContent(initMapping), MapperService.MergeReason.MAPPING_UPDATE); - assertThat(initMapper.mappers().getMapper("release_date"), notNullValue()); - assertFalse(initMapper.mappers().getMapper("release_date").fieldType().stored()); + assertThat(indexService.mapperService().fullName("release_date"), notNullValue()); + assertFalse(indexService.mapperService().fullName("release_date").stored()); String updateFormatMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("movie") .startObject("properties") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java index 
4e79a68c50e..4373f2210a7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.io.StringReader; import java.util.Arrays; +import java.util.Collections; import java.util.List; public class DocumentFieldMapperTests extends LuceneTestCase { @@ -138,7 +139,12 @@ public class DocumentFieldMapperTests extends LuceneTestCase { Analyzer defaultSearch = new FakeAnalyzer("default_search"); Analyzer defaultSearchQuote = new FakeAnalyzer("default_search_quote"); - DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers(Arrays.asList(fieldMapper1, fieldMapper2), defaultIndex, defaultSearch, defaultSearchQuote); + DocumentFieldMappers documentFieldMappers = new DocumentFieldMappers( + Arrays.asList(fieldMapper1, fieldMapper2), + Collections.emptyList(), + defaultIndex, + defaultSearch, + defaultSearchQuote); assertAnalyzes(documentFieldMappers.indexAnalyzer(), "field1", "index"); assertAnalyzes(documentFieldMappers.searchAnalyzer(), "field1", "search"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index 0234fcb681d..54b6b2310da 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -23,9 +23,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; 
import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -104,38 +105,50 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase { } public void testMergeSearchAnalyzer() throws Exception { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject()); + XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "whitespace") + .endObject().endObject() + .endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService(); + + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("whitespace")); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword").endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "keyword") + .endObject().endObject() + .endObject().endObject()); - DocumentMapper existing = parser.parse("type", new CompressedXContent(mapping1)); - DocumentMapper changed = parser.parse("type", new CompressedXContent(mapping2)); - - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), 
equalTo("whitespace")); - DocumentMapper merged = existing.merge(changed.mapping()); - - assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); + mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("keyword")); } public void testChangeSearchAnalyzerToDefault() throws Exception { - MapperService mapperService = createIndex("test").mapperService(); - String mapping1 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").field("search_analyzer", "whitespace").endObject().endObject() - .endObject().endObject()); + XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .field("search_analyzer", "whitespace") + .endObject().endObject() + .endObject().endObject(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService(); + + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("whitespace")); + String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "text").field("analyzer", "standard").endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "standard") + .endObject().endObject() + .endObject().endObject()); - DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE); - DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), 
MapperService.MergeReason.MAPPING_UPDATE); - - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); - - assertThat(((NamedAnalyzer) merged.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); + mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE); + assertThat(mapperService.fullName("field").searchAnalyzer().name(), equalTo("standard")); } public void testConcurrentMergeTest() throws Throwable { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 11f69c738e9..098aacc799f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -1001,7 +1001,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); } @@ -1014,8 +1014,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase { DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = builtDocMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - 
assertThat(doc.getBinaryValue(docMapper.idFieldMapper().fieldType().name()), equalTo(Uid.encodeId("1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } public void testSimpleParser() throws Exception { @@ -1026,8 +1026,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase { BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.getBinaryValue(docMapper.idFieldMapper().fieldType().name()), equalTo(Uid.encodeId("1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } public void testSimpleParserNoTypeNoId() throws Exception { @@ -1035,8 +1035,8 @@ public class DocumentParserTests extends ESSingleNodeTestCase { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json")); Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); - assertThat(doc.getBinaryValue(docMapper.idFieldMapper().fieldType().name()), equalTo(Uid.encodeId("1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().name()), equalTo("shay")); + assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); + 
assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } public void testAttributes() throws Exception { @@ -1389,4 +1389,98 @@ public class DocumentParserTests extends ESSingleNodeTestCase { client().prepareIndex("idx", "type").setSource(bytes2, XContentType.JSON).get()); assertThat(ExceptionsHelper.detailedMessage(err), containsString("field name cannot be an empty string")); } + + public void testWriteToFieldAlias() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .field("alias-field", "value") + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + + assertEquals("Cannot write to a field alias [alias-field].", exception.getCause().getMessage()); + } + + public void testCopyToFieldAlias() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .startObject("text-field") + .field("type", "text") + .field("copy_to", "alias-field") + .endObject() + .endObject() + .endObject() + .endObject()); + + 
DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .field("text-field", "value") + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + + assertEquals("Cannot copy to a field alias [alias-field].", exception.getCause().getMessage()); + } + + public void testDynamicDottedFieldNameWithFieldAlias() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() + .startObject() + .startObject("alias-field.dynamic-field") + .field("type", "keyword") + .endObject() + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + + assertEquals("Could not dynamically add mapping for field [alias-field.dynamic-field]. 
" + + "Existing mapping for [alias-field] must be of type object but found [alias].", exception.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index c5032090092..b7ee74fb773 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -45,7 +45,9 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase { .endObject().endObject()); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); - DocumentMapper mapper = index.mapperService().documentMapper("type"); + MapperService mapperService = index.mapperService(); + DocumentMapper mapper = mapperService.documentMapper(); + QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference @@ -61,7 +63,6 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase { assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("type") .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - mapper = index.mapperService().documentMapper("type"); writer.addDocument(doc.rootDoc()); writer.addDocument(doc.rootDoc()); @@ -69,25 +70,25 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase { IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - TopDocs topDocs = searcher.search(mapper.mappers().getMapper("field1").fieldType().termQuery("value1", context), 10); + TopDocs topDocs = searcher.search(mapperService.fullName("field1").termQuery("value1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs 
= searcher.search(mapper.mappers().getMapper("field2").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapperService.fullName("field2").termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field3").fieldType().termQuery("1.1", context), 10); + topDocs = searcher.search(mapperService.fullName("field3").termQuery("1.1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field4").fieldType().termQuery("2010-01-01", context), 10); + topDocs = searcher.search(mapperService.fullName("field4").termQuery("2010-01-01", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapperService.fullName("field5").termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("2", context), 10); + topDocs = searcher.search(mapperService.fullName("field5").termQuery("2", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("3", context), 10); + topDocs = searcher.search(mapperService.fullName("field5").termQuery("3", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); writer.close(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 0d7dde415aa..7d022b55454 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -625,11 +625,11 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { 
.setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); - assertThat(mapper.fieldType().typeName(), equalTo("long")); + Mapper mapper = defaultMapper.mappers().getMapper("s_long"); + assertThat(mapper.typeName(), equalTo("long")); mapper = defaultMapper.mappers().getMapper("s_double"); - assertThat(mapper.fieldType().typeName(), equalTo("float")); + assertThat(mapper.typeName(), equalTo("float")); } public void testNumericDetectionDefault() throws Exception { @@ -652,7 +652,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get()); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); + Mapper mapper = defaultMapper.mappers().getMapper("s_long"); assertThat(mapper, instanceOf(TextFieldMapper.class)); mapper = defaultMapper.mappers().getMapper("s_double"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index d8e8c8e0e3d..62c764e8060 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -30,11 +30,11 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.hamcrest.Matchers; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; public class 
DynamicTemplatesTests extends ESSingleNodeTestCase { public void testMatchTypeOnly() throws Exception { @@ -45,7 +45,9 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase { .endObject().endObject().endArray().endObject().endObject(); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("person").setSource(builder).get(); - DocumentMapper docMapper = index.mapperService().documentMapper("person"); + + MapperService mapperService = index.mapperService(); + DocumentMapper docMapper = mapperService.documentMapper("person"); builder = JsonXContent.contentBuilder(); builder.startObject().field("s", "hello").field("l", 1).endObject(); ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", BytesReference.bytes(builder), @@ -53,14 +55,11 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase { client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - docMapper = index.mapperService().documentMapper("person"); - DocumentFieldMappers mappers = docMapper.mappers(); + assertThat(mapperService.fullName("s"), notNullValue()); + assertEquals(IndexOptions.NONE, mapperService.fullName("s").indexOptions()); - assertThat(mappers.getMapper("s"), Matchers.notNullValue()); - assertEquals(IndexOptions.NONE, mappers.getMapper("s").fieldType().indexOptions()); - - assertThat(mappers.getMapper("l"), Matchers.notNullValue()); - assertNotSame(IndexOptions.NONE, mappers.getMapper("l").fieldType().indexOptions()); + assertThat(mapperService.fullName("l"), notNullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("l").indexOptions()); } @@ -84,7 +83,7 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase { assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); assertThat(f.fieldType().tokenized(), equalTo(false)); - FieldMapper fieldMapper = 
docMapper.mappers().getMapper("name"); + Mapper fieldMapper = docMapper.mappers().getMapper("name"); assertNotNull(fieldMapper); f = doc.getField("multi1"); @@ -143,7 +142,7 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase { assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions()); assertThat(f.fieldType().tokenized(), equalTo(false)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); + Mapper fieldMapper = docMapper.mappers().getMapper("name"); assertNotNull(fieldMapper); f = doc.getField("multi1"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java new file mode 100644 index 00000000000..9f87ad3d039 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java @@ -0,0 +1,167 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; + +public class FieldAliasMapperTests extends ESSingleNodeTestCase { + private MapperService mapperService; + private DocumentMapperParser parser; + + @Before + public void setup() { + IndexService indexService = createIndex("test"); + mapperService = indexService.mapperService(); + parser = mapperService.documentMapperParser(); + } + + public void testParsing() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .startObject("concrete-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + } + + public void testParsingWithMissingPath() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .endObject() + .endObject() + .endObject() + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("The [path] property must be specified for field [alias-field].", exception.getMessage()); + } + + public void testParsingWithExtraArgument() throws IOException { + String mapping = 
Strings.toString(XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .field("extra-field", "extra-value") + .endObject() + .endObject() + .endObject() + .endObject()); + MapperParsingException exception = expectThrows(MapperParsingException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Mapping definition for [alias-field] has unsupported parameters: [extra-field : extra-value]", + exception.getMessage()); + } + + public void testMerge() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("first-field") + .field("type", "keyword") + .endObject() + .startObject("alias-field") + .field("type", "alias") + .field("path", "first-field") + .endObject() + .endObject() + .endObject() + .endObject()); + mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + MappedFieldType firstFieldType = mapperService.fullName("alias-field"); + assertEquals("first-field", firstFieldType.name()); + assertTrue(firstFieldType instanceof KeywordFieldMapper.KeywordFieldType); + + String newMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("second-field") + .field("type", "text") + .endObject() + .startObject("alias-field") + .field("type", "alias") + .field("path", "second-field") + .endObject() + .endObject() + .endObject() + .endObject()); + mapperService.merge("type", new CompressedXContent(newMapping), MergeReason.MAPPING_UPDATE); + + MappedFieldType secondFieldType = mapperService.fullName("alias-field"); + assertEquals("second-field", secondFieldType.name()); + assertTrue(secondFieldType instanceof TextFieldMapper.TextFieldType); + } + + public void testMergeFailure() 
throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("concrete-field") + .field("type", "text") + .endObject() + .startObject("alias-field") + .field("type", "alias") + .field("path", "concrete-field") + .endObject() + .endObject() + .endObject() + .endObject()); + mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + String newMapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("alias-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject()); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("type", new CompressedXContent(newMapping), MergeReason.MAPPING_UPDATE)); + assertEquals("Cannot merge a field alias mapping [alias-field] with a mapping that is not for a field alias.", + exception.getMessage()); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 4f1b908cae8..6e27823f8a0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -28,10 +28,11 @@ import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Iterator; import java.util.List; +import static java.util.Collections.emptyList; + public class FieldTypeLookupTests extends ESTestCase { public void testEmpty() { @@ -48,7 +49,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testDefaultMapping() { FieldTypeLookup lookup = new FieldTypeLookup(); try { - lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, 
Collections.emptyList()); + lookup.copyAndAddAll(MapperService.DEFAULT_MAPPING, emptyList(), emptyList()); fail(); } catch (IllegalArgumentException expected) { assertEquals("Default mappings should not be added to the lookup", expected.getMessage()); @@ -58,7 +59,7 @@ public class FieldTypeLookupTests extends ESTestCase { public void testAddNewField() { FieldTypeLookup lookup = new FieldTypeLookup(); MockFieldMapper f = new MockFieldMapper("foo"); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f)); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), emptyList()); assertNull(lookup.get("foo")); assertNull(lookup.get("bar")); assertEquals(f.fieldType(), lookup2.get("foo")); @@ -70,68 +71,203 @@ public class FieldTypeLookupTests extends ESTestCase { MockFieldMapper f = new MockFieldMapper("foo"); MockFieldMapper f2 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type1", newList(f)); - FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2)); + lookup = lookup.copyAndAddAll("type1", newList(f), emptyList()); + FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), emptyList()); assertEquals(1, size(lookup2.iterator())); assertSame(f.fieldType(), lookup2.get("foo")); assertEquals(f2.fieldType(), lookup2.get("foo")); } - public void testCheckCompatibilityMismatchedTypes() { + public void testMismatchedFieldTypes() { FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), emptyList()); OtherFakeFieldType ft2 = new OtherFakeFieldType(); ft2.setName("foo"); FieldMapper f2 = new MockFieldMapper("foo", ft2); try { - lookup.copyAndAddAll("type2", newList(f2)); + lookup.copyAndAddAll("type2", newList(f2), emptyList()); fail("expected type mismatch"); } catch (IllegalArgumentException e) { 
assertTrue(e.getMessage().contains("cannot be changed from type [faketype] to [otherfaketype]")); } } - public void testCheckCompatibilityConflict() { + public void testConflictingFieldTypes() { FieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), emptyList()); MappedFieldType ft2 = new MockFieldMapper.FakeFieldType(); ft2.setName("foo"); ft2.setBoost(2.0f); FieldMapper f2 = new MockFieldMapper("foo", ft2); - lookup.copyAndAddAll("type", newList(f2)); // boost is updateable, so ok since we are implicitly updating all types - lookup.copyAndAddAll("type2", newList(f2)); // boost is updateable, so ok if forcing + lookup.copyAndAddAll("type", newList(f2), emptyList()); // boost is updateable, so ok since we are implicitly updating all types + lookup.copyAndAddAll("type2", newList(f2), emptyList()); // boost is updateable, so ok if forcing // now with a non changeable setting MappedFieldType ft3 = new MockFieldMapper.FakeFieldType(); ft3.setName("foo"); ft3.setStored(true); FieldMapper f3 = new MockFieldMapper("foo", ft3); try { - lookup.copyAndAddAll("type2", newList(f3)); + lookup.copyAndAddAll("type2", newList(f3), emptyList()); fail("expected conflict"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("has different [store] values")); } } - public void testSimpleMatchFullNames() { - MockFieldMapper f1 = new MockFieldMapper("foo"); - MockFieldMapper f2 = new MockFieldMapper("bar"); + public void testAddFieldAlias() { + MockFieldMapper field = new MockFieldMapper("foo"); + FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo"); + FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1, f2)); + lookup = lookup.copyAndAddAll("type", newList(field), newList(alias)); + + MappedFieldType aliasType = lookup.get("alias"); + 
assertEquals(field.fieldType(), aliasType); + } + + public void testUpdateFieldAlias() { + // Add an alias 'alias' to the concrete field 'foo'. + MockFieldMapper.FakeFieldType fieldType1 = new MockFieldMapper.FakeFieldType(); + MockFieldMapper field1 = new MockFieldMapper("foo", fieldType1); + FieldAliasMapper alias1 = new FieldAliasMapper("alias", "alias", "foo"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", newList(field1), newList(alias1)); + + // Check that the alias refers to 'foo'. + MappedFieldType aliasType1 = lookup.get("alias"); + assertEquals(fieldType1, aliasType1); + + // Update the alias to refer to a new concrete field 'bar'. + MockFieldMapper.FakeFieldType fieldType2 = new MockFieldMapper.FakeFieldType(); + fieldType2.setStored(!fieldType1.stored()); + MockFieldMapper field2 = new MockFieldMapper("bar", fieldType2); + + FieldAliasMapper alias2 = new FieldAliasMapper("alias", "alias", "bar"); + lookup = lookup.copyAndAddAll("type", newList(field2), newList(alias2)); + + // Check that the alias now refers to 'bar'. + MappedFieldType aliasType2 = lookup.get("alias"); + assertEquals(fieldType2, aliasType2); + } + + public void testUpdateConcreteFieldWithAlias() { + // Add an alias 'alias' to the concrete field 'foo'. + FieldAliasMapper alias1 = new FieldAliasMapper("alias", "alias", "foo"); + MockFieldMapper.FakeFieldType fieldType1 = new MockFieldMapper.FakeFieldType(); + fieldType1.setBoost(1.0f); + MockFieldMapper field1 = new MockFieldMapper("foo", fieldType1); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", newList(field1), newList(alias1)); + + // Check that the alias maps to this field type. + MappedFieldType aliasType1 = lookup.get("alias"); + assertEquals(fieldType1, aliasType1); + + // Update the boost for field 'foo'. 
+ MockFieldMapper.FakeFieldType fieldType2 = new MockFieldMapper.FakeFieldType(); + fieldType2.setBoost(2.0f); + MockFieldMapper field2 = new MockFieldMapper("foo", fieldType2); + lookup = lookup.copyAndAddAll("type", newList(field2), emptyList()); + + // Check that the alias maps to the new field type. + MappedFieldType aliasType2 = lookup.get("alias"); + assertEquals(fieldType2, aliasType2); + } + + public void testAliasThatRefersToAlias() { + MockFieldMapper field = new MockFieldMapper("foo"); + FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo"); + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", newList(field), newList(alias)); + + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "alias"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", emptyList(), newList(invalidAlias))); + assertEquals("Invalid [path] value [alias] for field alias [invalid-alias]: an alias" + + " cannot refer to another alias.", e.getMessage()); + } + + public void testAliasThatRefersToItself() { + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "invalid-alias"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", emptyList(), newList(invalidAlias))); + assertEquals("Invalid [path] value [invalid-alias] for field alias [invalid-alias]: an alias" + + " cannot refer to itself.", e.getMessage()); + } + + public void testAliasWithNonExistentPath() { + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "non-existent"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", emptyList(), newList(invalidAlias))); + assertEquals("Invalid [path] value [non-existent] 
for field alias [invalid-alias]: an alias" + + " must refer to an existing field in the mappings.", e.getMessage()); + } + + public void testAddAliasWithPreexistingField() { + MockFieldMapper field = new MockFieldMapper("field"); + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", newList(field), emptyList()); + + MockFieldMapper invalidField = new MockFieldMapper("invalid"); + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", newList(invalidField), newList(invalidAlias))); + assertEquals("The name for field alias [invalid] has already been used to define a concrete field.", + e.getMessage()); + } + + public void testAddFieldWithPreexistingAlias() { + MockFieldMapper field = new MockFieldMapper("field"); + FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field"); + + FieldTypeLookup lookup = new FieldTypeLookup() + .copyAndAddAll("type", newList(field), newList(invalidAlias)); + + MockFieldMapper invalidField = new MockFieldMapper("invalid"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> lookup.copyAndAddAll("type", newList(invalidField), emptyList())); + assertEquals("The name for field [invalid] has already been used to define a field alias.", + e.getMessage()); + } + + public void testSimpleMatchToFullName() { + MockFieldMapper field1 = new MockFieldMapper("foo"); + MockFieldMapper field2 = new MockFieldMapper("bar"); + + FieldAliasMapper alias1 = new FieldAliasMapper("food", "food", "foo"); + FieldAliasMapper alias2 = new FieldAliasMapper("barometer", "barometer", "bar"); + + FieldTypeLookup lookup = new FieldTypeLookup(); + lookup = lookup.copyAndAddAll("type", + newList(field1, field2), + newList(alias1, alias2)); + Collection names = lookup.simpleMatchToFullName("b*"); + assertFalse(names.contains("foo")); + 
assertFalse(names.contains("food")); + assertTrue(names.contains("bar")); + assertTrue(names.contains("barometer")); } public void testIteratorImmutable() { MockFieldMapper f1 = new MockFieldMapper("foo"); FieldTypeLookup lookup = new FieldTypeLookup(); - lookup = lookup.copyAndAddAll("type", newList(f1)); + lookup = lookup.copyAndAddAll("type", newList(f1), emptyList()); try { Iterator itr = lookup.iterator(); @@ -144,7 +280,11 @@ public class FieldTypeLookupTests extends ESTestCase { } } - static List newList(FieldMapper... mapper) { + private static List newList(FieldMapper... mapper) { + return Arrays.asList(mapper); + } + + private static List newList(FieldAliasMapper... mapper) { return Arrays.asList(mapper); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java index 57a6173bc65..6999e39b70a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -23,10 +23,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; @@ -38,13 +35,14 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json"); IndexService index = createIndex("test"); 
client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); - DocumentMapper docMapper = index.mapperService().documentMapper("person"); + + MapperService mapperService = index.mapperService(); + byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), - XContentType.JSON)); + ParsedDocument parsedDoc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); @@ -52,8 +50,8 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase { assertThat(f.stringValue(), equalTo("some name")); assertThat(f.fieldType().stored(), equalTo(true)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(true)); + MappedFieldType fieldType = mapperService.fullName("name"); + assertThat(fieldType.stored(), equalTo(true)); boolean stored = false; for (IndexableField field : doc.getFields("age")) { @@ -61,7 +59,7 @@ public class GenericStoreDynamicTemplateTests extends ESSingleNodeTestCase { } assertTrue(stored); - fieldMapper = docMapper.mappers().getMapper("age"); - assertThat(fieldMapper.fieldType().stored(), equalTo(true)); + fieldType = mapperService.fullName("age"); + assertThat(fieldType.stored(), equalTo(true)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index 
facafaf180e..eabf0a849fa 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -287,7 +287,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class)); boolean ignoreZValue = ((GeoPointFieldMapper)fieldMapper).ignoreZValue().value(); @@ -364,10 +364,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class)); - Object nullValue = fieldMapper.fieldType().nullValue(); + Object nullValue = ((GeoPointFieldMapper) fieldMapper).fieldType().nullValue(); assertThat(nullValue, equalTo(new GeoPoint(1, 2))); ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 7ff8c28f6dc..4c947a44a0a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -59,7 +59,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -83,7 +83,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation(); @@ -121,7 +121,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); boolean coerce = ((GeoShapeFieldMapper)fieldMapper).coerce().value(); @@ -157,7 +157,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); boolean ignoreZValue = 
((GeoShapeFieldMapper)fieldMapper).ignoreZValue().value(); @@ -191,7 +191,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); Explicit ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed(); @@ -225,7 +225,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -248,7 +248,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -276,7 +276,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = 
defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -300,7 +300,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -326,7 +326,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -350,7 +350,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -373,7 +373,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, 
instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -395,7 +395,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -418,7 +418,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -440,7 +440,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; @@ -475,7 +475,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { } // verify nothing changed - FieldMapper fieldMapper = docMapper.mappers().getMapper("shape"); + Mapper fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; 
@@ -600,7 +600,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java index 32084c50310..8060c0a3f92 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java @@ -38,69 +38,68 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, 
XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), nullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), nullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); - doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper().parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - 
docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - 
assertThat(docMapper.mappers().getMapper("name.not_indexed3"), notNullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), notNullValue()); } public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -108,32 +107,31 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), 
MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), nullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), nullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); - doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); - assertNotSame(IndexOptions.NONE, 
docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json"); @@ -146,10 +144,10 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { } // There are conflicts, so the `name.not_indexed3` has not been added - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed2"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed2"), notNullValue()); + assertThat(mapperService.fullName("name.not_indexed3"), nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java new file mode 100644 index 00000000000..af17918baac --- /dev/null +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; + +public class MapperMergeValidatorTests extends ESTestCase { + + public void testDuplicateFieldAliasAndObject() { + ObjectMapper objectMapper = createObjectMapper("some.path"); + FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + MapperMergeValidator.validateMapperStructure("type", + singletonList(objectMapper), + emptyList(), + singletonList(aliasMapper), + emptyMap(), + new FieldTypeLookup())); + assertEquals("Field [some.path] is defined both as an object and a field in [type]", e.getMessage()); + } + + public void testFieldAliasWithNestedScope() { + ObjectMapper objectMapper = 
createNestedObjectMapper("nested"); + FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested.alias", "nested.field"); + + MapperMergeValidator.validateFieldReferences(emptyList(), + singletonList(aliasMapper), + Collections.singletonMap("nested", objectMapper), + new FieldTypeLookup()); + } + + public void testFieldAliasWithDifferentObjectScopes() { + Map fullPathObjectMappers = new HashMap<>(); + fullPathObjectMappers.put("object1", createObjectMapper("object1")); + fullPathObjectMappers.put("object2", createObjectMapper("object2")); + + FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "object2.alias", "object1.field"); + + MapperMergeValidator.validateFieldReferences(emptyList(), + singletonList(aliasMapper), + fullPathObjectMappers, + new FieldTypeLookup()); + } + + public void testFieldAliasWithNestedTarget() { + ObjectMapper objectMapper = createNestedObjectMapper("nested"); + FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "alias", "nested.field"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + MapperMergeValidator.validateFieldReferences(emptyList(), + singletonList(aliasMapper), + Collections.singletonMap("nested", objectMapper), + new FieldTypeLookup())); + + String expectedMessage = "Invalid [path] value [nested.field] for field alias [alias]: " + + "an alias must have the same nested scope as its target. 
The alias is not nested, " + + "but the target's nested scope is [nested]."; + assertEquals(expectedMessage, e.getMessage()); + } + + public void testFieldAliasWithDifferentNestedScopes() { + Map fullPathObjectMappers = new HashMap<>(); + fullPathObjectMappers.put("nested1", createNestedObjectMapper("nested1")); + fullPathObjectMappers.put("nested2", createNestedObjectMapper("nested2")); + + FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested2.alias", "nested1.field"); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + MapperMergeValidator.validateFieldReferences(emptyList(), + singletonList(aliasMapper), + fullPathObjectMappers, + new FieldTypeLookup())); + + + String expectedMessage = "Invalid [path] value [nested1.field] for field alias [nested2.alias]: " + + "an alias must have the same nested scope as its target. The alias's nested scope is [nested2], " + + "but the target's nested scope is [nested1]."; + assertEquals(expectedMessage, e.getMessage()); + } + + private static ObjectMapper createObjectMapper(String name) { + return new ObjectMapper(name, name, true, + ObjectMapper.Nested.NO, + ObjectMapper.Dynamic.FALSE, emptyMap(), Settings.EMPTY); + } + + private static ObjectMapper createNestedObjectMapper(String name) { + return new ObjectMapper(name, name, true, + ObjectMapper.Nested.newNested(false, false), + ObjectMapper.Dynamic.FALSE, emptyMap(), Settings.EMPTY); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 6bccb7106f6..20e0dd4639c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -235,6 +235,41 @@ public class MapperServiceTests extends ESSingleNodeTestCase { containsString("cannot have nested fields when index sort is activated")); } + public void 
testFieldAliasWithMismatchedNestedScope() throws Throwable { + IndexService indexService = createIndex("test"); + MapperService mapperService = indexService.mapperService(); + + CompressedXContent mapping = new CompressedXContent(BytesReference.bytes( + XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("nested") + .field("type", "nested") + .startObject("properties") + .startObject("field") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject())); + + mapperService.merge("type", mapping, MergeReason.MAPPING_UPDATE); + + CompressedXContent mappingUpdate = new CompressedXContent(BytesReference.bytes( + XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("alias") + .field("type", "alias") + .field("path", "nested.field") + .endObject() + .endObject() + .endObject())); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> mapperService.merge("type", mappingUpdate, MergeReason.MAPPING_UPDATE)); + assertThat(e.getMessage(), containsString("Invalid [path] value [nested.field] for field alias [alias]")); + } + public void testForbidMultipleTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index 45bb8090206..0f3d5193c28 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexService; import 
org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -55,9 +56,15 @@ public class MultiFieldTests extends ESSingleNodeTestCase { } private void testMultiField(String mapping) throws Exception { - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); + IndexService indexService = createIndex("test"); + MapperService mapperService = indexService.mapperService(); + + indexService.mapperService().merge("person", new CompressedXContent(mapping), + MapperService.MergeReason.MAPPING_UPDATE); + BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); @@ -84,37 +91,37 @@ public class MultiFieldTests extends ESSingleNodeTestCase { assertThat(f.name(), equalTo("object1.multi1.string")); assertThat(f.binaryValue(), equalTo(new BytesRef("2010-01-01"))); - assertThat(docMapper.mappers().getMapper("name"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name"), instanceOf(TextFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name").fieldType().tokenized(), equalTo(true)); + assertThat(mapperService.fullName("name"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + 
assertNotSame(IndexOptions.NONE, mapperService.fullName("name").indexOptions()); + assertThat(mapperService.fullName("name").stored(), equalTo(true)); + assertThat(mapperService.fullName("name").tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.indexed"), instanceOf(TextFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.indexed").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().stored(), equalTo(false)); - assertThat(docMapper.mappers().getMapper("name.indexed").fieldType().tokenized(), equalTo(true)); + assertThat(mapperService.fullName("name.indexed"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name.indexed").indexOptions()); + assertThat(mapperService.fullName("name.indexed").stored(), equalTo(false)); + assertThat(mapperService.fullName("name.indexed").tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.not_indexed"), instanceOf(TextFieldMapper.class)); - assertEquals(IndexOptions.NONE, docMapper.mappers().getMapper("name.not_indexed").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.not_indexed").fieldType().tokenized(), equalTo(true)); + assertThat(mapperService.fullName("name.not_indexed"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertEquals(IndexOptions.NONE, mapperService.fullName("name.not_indexed").indexOptions()); + assertThat(mapperService.fullName("name.not_indexed").stored(), equalTo(true)); + 
assertThat(mapperService.fullName("name.not_indexed").tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1"), notNullValue()); - assertThat(docMapper.mappers().getMapper("name.test1"), instanceOf(TextFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.test1").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("name.test1").fieldType().stored(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1").fieldType().tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1").fieldType().eagerGlobalOrdinals(), equalTo(true)); + assertThat(mapperService.fullName("name.test1"), notNullValue()); + assertThat(mapperService.fullName("name"), instanceOf(TextFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("name.test1").indexOptions()); + assertThat(mapperService.fullName("name.test1").stored(), equalTo(true)); + assertThat(mapperService.fullName("name.test1").tokenized(), equalTo(true)); + assertThat(mapperService.fullName("name.test1").eagerGlobalOrdinals(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("object1.multi1"), notNullValue()); - assertThat(docMapper.mappers().getMapper("object1.multi1"), instanceOf(DateFieldMapper.class)); - assertThat(docMapper.mappers().getMapper("object1.multi1.string"), notNullValue()); - assertThat(docMapper.mappers().getMapper("object1.multi1.string"), instanceOf(KeywordFieldMapper.class)); - assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("object1.multi1.string").fieldType().indexOptions()); - assertThat(docMapper.mappers().getMapper("object1.multi1.string").fieldType().tokenized(), equalTo(false)); + assertThat(mapperService.fullName("object1.multi1"), notNullValue()); + assertThat(mapperService.fullName("object1.multi1"), instanceOf(DateFieldMapper.DateFieldType.class)); + assertThat(mapperService.fullName("object1.multi1.string"), notNullValue()); + 
assertThat(mapperService.fullName("object1.multi1.string"), instanceOf(KeywordFieldMapper.KeywordFieldType.class)); + assertNotSame(IndexOptions.NONE, mapperService.fullName("object1.multi1.string").indexOptions()); + assertThat(mapperService.fullName("object1.multi1.string").tokenized(), equalTo(false)); } public void testBuildThenParse() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java index 271501281cd..6bb15432b1f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/PathMapperTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java index 3ad53513c51..9546fb5136e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -23,10 +23,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; @@ -38,13 +35,14 @@ public class 
PathMatchDynamicTemplateTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json"); IndexService index = createIndex("test"); client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); - DocumentMapper docMapper = index.mapperService().documentMapper("person"); + + MapperService mapperService = index.mapperService(); + byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), - XContentType.JSON)); + ParsedDocument parsedDoc = mapperService.documentMapper().parse( + SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); - docMapper = index.mapperService().documentMapper("person"); Document doc = parsedDoc.rootDoc(); IndexableField f = doc.getField("name"); @@ -52,26 +50,26 @@ public class PathMatchDynamicTemplateTests extends ESSingleNodeTestCase { assertThat(f.stringValue(), equalTo("top_level")); assertThat(f.fieldType().stored(), equalTo(false)); - FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(false)); + MappedFieldType fieldType = mapperService.fullName("name"); + assertThat(fieldType.stored(), equalTo(false)); f = doc.getField("obj1.name"); assertThat(f.name(), equalTo("obj1.name")); assertThat(f.fieldType().stored(), equalTo(true)); - fieldMapper = docMapper.mappers().getMapper("obj1.name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(true)); + fieldType = mapperService.fullName("obj1.name"); + assertThat(fieldType.stored(), equalTo(true)); f = 
doc.getField("obj1.obj2.name"); assertThat(f.name(), equalTo("obj1.obj2.name")); assertThat(f.fieldType().stored(), equalTo(false)); - fieldMapper = docMapper.mappers().getMapper("obj1.obj2.name"); - assertThat(fieldMapper.fieldType().stored(), equalTo(false)); + fieldType = mapperService.fullName("obj1.obj2.name"); + assertThat(fieldType.stored(), equalTo(false)); // verify more complex path_match expressions - fieldMapper = docMapper.mappers().getMapper("obj3.obj4.prop1"); - assertNotNull(fieldMapper); + fieldType = mapperService.fullName("obj3.obj4.prop1"); + assertNotNull(fieldType); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 95ffc373e6b..a07192df804 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -28,13 +28,14 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.ESSingleNodeTestCase; -import java.util.Collections; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -84,9 +85,11 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase { DirectoryReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor( - Collections.emptySet(), Collections.singletonList("field*"), false); + Set 
fieldNames = Sets.newHashSet("field1", "field2", "field3", "field4", "field5", + "field6", "field7", "field8", "field9", "field10"); + CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); searcher.doc(0, fieldsVisitor); + fieldsVisitor.postProcess(mapperService); assertThat(fieldsVisitor.fields().size(), equalTo(10)); assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index e3dc8ff0b78..4736cbe4712 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -490,9 +490,10 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - assertTrue(mapper.mappers().getMapper("field").fieldType().eagerGlobalOrdinals()); + + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + assertTrue(fieldMapper.fieldType().eagerGlobalOrdinals()); } public void testFielddata() throws IOException { @@ -504,8 +505,10 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, disabledMapper.mappingSource().toString()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> disabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + FieldMapper fieldMapper = (FieldMapper) disabledMapper.mappers().getMapper("field"); + fieldMapper.fieldType().fielddataBuilder("test"); + }); assertThat(e.getMessage(), 
containsString("Fielddata is disabled")); mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") @@ -518,7 +521,9 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, enabledMapper.mappingSource().toString()); - enabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder("test"); // no exception this time + + FieldMapper enabledFieldMapper = (FieldMapper) enabledMapper.mappers().getMapper("field"); + enabledFieldMapper.fieldType().fielddataBuilder("test"); // no exception this time String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") @@ -547,7 +552,9 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - TextFieldType fieldType = (TextFieldType) mapper.mappers().getMapper("field").fieldType(); + TextFieldMapper fieldMapper = (TextFieldMapper) mapper.mappers().getMapper("field"); + TextFieldType fieldType = fieldMapper.fieldType(); + assertThat(fieldType.fielddataMinFrequency(), equalTo(2d)); assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE)); assertThat(fieldType.fielddataMinSegmentSize(), equalTo(1000)); @@ -630,7 +637,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, ft.indexOptions()); } @@ -646,7 +653,7 @@ public class TextFieldMapperTests extends 
ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; assertEquals(IndexOptions.DOCS, ft.indexOptions()); assertFalse(ft.storeTermVectors()); @@ -663,7 +670,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); @@ -684,7 +691,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); @@ -705,7 +712,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); @@ -836,10 +843,13 @@ public class 
TextFieldMapperTests extends ESSingleNodeTestCase { assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=1:10")); - Query q = mapper.mappers().getMapper("field").fieldType().prefixQuery("goin", CONSTANT_SCORE_REWRITE, queryShardContext); + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + MappedFieldType fieldType = fieldMapper.fieldType; + + Query q = fieldType.prefixQuery("goin", CONSTANT_SCORE_REWRITE, queryShardContext); + assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q); - q = mapper.mappers().getMapper("field").fieldType().prefixQuery("internationalisatio", - CONSTANT_SCORE_REWRITE, queryShardContext); + q = fieldType.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, queryShardContext); assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q); ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference @@ -864,17 +874,16 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { CompressedXContent json = new CompressedXContent(mapping); DocumentMapper mapper = parser.parse("type", json); - Query q1 = mapper.mappers().getMapper("field").fieldType().prefixQuery("g", - CONSTANT_SCORE_REWRITE, queryShardContext); + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + MappedFieldType fieldType = fieldMapper.fieldType; + + Query q1 = fieldType.prefixQuery("g", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q1, instanceOf(PrefixQuery.class)); - Query q2 = mapper.mappers().getMapper("field").fieldType().prefixQuery("go", - CONSTANT_SCORE_REWRITE, queryShardContext); + Query q2 = fieldType.prefixQuery("go", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q2, instanceOf(ConstantScoreQuery.class)); - Query q5 = mapper.mappers().getMapper("field").fieldType().prefixQuery("going", - CONSTANT_SCORE_REWRITE, queryShardContext); + Query q5 = 
fieldType.prefixQuery("going", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q5, instanceOf(ConstantScoreQuery.class)); - Query q6 = mapper.mappers().getMapper("field").fieldType().prefixQuery("goings", - CONSTANT_SCORE_REWRITE, queryShardContext); + Query q6 = fieldType.prefixQuery("goings", CONSTANT_SCORE_REWRITE, queryShardContext); assertThat(q6, instanceOf(PrefixQuery.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index 5478e172fed..fe39345dadd 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; @@ -27,6 +28,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; @@ -39,19 +41,16 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase terms = extendedCommonTermsQuery.getTerms(); + if (!terms.isEmpty()) { + String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); + String actualFieldName = terms.iterator().next().field(); + assertThat(actualFieldName, equalTo(expectedFieldName)); + } + assertThat(extendedCommonTermsQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.highFreqMinimumShouldMatch())); assertThat(extendedCommonTermsQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.lowFreqMinimumShouldMatch())); } diff --git 
a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index d4547eee26f..88742e08554 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -76,7 +76,7 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase { /** Randomly generate either NaN or one of the two infinity values. */ @@ -46,7 +47,8 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { @Override protected GeoPolygonQueryBuilder doCreateTestQueryBuilder() { + String fieldName = randomFrom(GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME); List polygon = randomPolygon(); - GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, polygon); + GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, polygon); if (randomBoolean()) { builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index e5da5d7f971..694b63b141b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -42,13 +42,13 @@ import static org.hamcrest.Matchers.notNullValue; public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchPhrasePrefixQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, + INT_FIELD_NAME, 
DOUBLE_FIELD_NAME, DATE_FIELD_NAME); if (fieldName.equals(DATE_FIELD_NAME)) { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); } Object value; - if (fieldName.equals(STRING_FIELD_NAME)) { + if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); StringBuilder builder = new StringBuilder(); for (int i = 0; i < terms; i++) { @@ -61,7 +61,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchPhraseQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, + INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); if (fieldName.equals(DATE_FIELD_NAME)) { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); } Object value; - if (fieldName.equals(STRING_FIELD_NAME)) { + if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); StringBuilder builder = new StringBuilder(); for (int i = 0; i < terms; i++) { @@ -64,7 +64,7 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = STRING_ALIAS_FIELD_NAME; //randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, + //INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME); if (fieldName.equals(DATE_FIELD_NAME)) { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); } Object value; - if (fieldName.equals(STRING_FIELD_NAME)) { + if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); StringBuilder builder = new StringBuilder(); for (int i = 0; i < terms; i++) { @@ -79,11 +81,11 
@@ public class MatchQueryBuilderTests extends AbstractQueryTestCase terms = ectq.getTerms(); + if (!terms.isEmpty()) { + Term term = terms.iterator().next(); + String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); + assertThat(term.field(), equalTo(expectedFieldName)); + } assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE); } @@ -195,6 +203,9 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0) { if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) && context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) { - expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(queryBuilder.fieldName())); + expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(expectedFieldName)); } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) && context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) { - expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(queryBuilder.fieldName())); + expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName)); } else { - expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, queryBuilder.fieldName()))); + expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName))); } } else { expectedQuery = new MatchNoDocsQuery("no mappings yet"); @@ -146,18 +149,18 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase fields = new HashMap<>(); for (int i = 0; i < fieldCount; i++) { if (randomBoolean()) { - fields.put(STRING_FIELD_NAME, AbstractQueryBuilder.DEFAULT_BOOST); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME); + fields.put(fieldName, AbstractQueryBuilder.DEFAULT_BOOST); } else { fields.put(STRING_FIELD_NAME_2, 2.0f / randomIntBetween(1, 20)); } diff 
--git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index c93df5b7519..7c459737c77 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -63,7 +63,11 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase choice.equals(GEO_POINT_FIELD_NAME) || choice.equals(GEO_SHAPE_FIELD_NAME) - || choice.equals(INT_RANGE_FIELD_NAME) || choice.equals(DATE_RANGE_FIELD_NAME), () -> getRandomFieldName()); + String fieldName = randomValueOtherThanMany(choice -> + choice.equals(GEO_POINT_FIELD_NAME) || + choice.equals(GEO_POINT_ALIAS_FIELD_NAME) || + choice.equals(GEO_SHAPE_FIELD_NAME) || + choice.equals(INT_RANGE_FIELD_NAME) || + choice.equals(DATE_RANGE_FIELD_NAME), + () -> getRandomFieldName()); Object[] values = new Object[randomInt(5)]; for (int i = 0; i < values.length; i++) { values[i] = getRandomValueForFieldName(fieldName); @@ -129,7 +133,8 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase value.equals(GEO_POINT_FIELD_NAME) || value.equals(GEO_SHAPE_FIELD_NAME), + () -> randomFrom(MAPPED_FIELD_NAMES)); List randomTerms = randomValues(fieldName); TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder(STRING_FIELD_NAME, randomTerms); if (randomBoolean()) { @@ -261,6 +262,22 @@ public class TermsSetQueryBuilderTests extends AbstractQueryTestCase randomTerms = Arrays.asList(generateRandomStringArray(5, 10, false, false)); + TermsSetQueryBuilder queryBuilder = new TermsSetQueryBuilder(STRING_ALIAS_FIELD_NAME, randomTerms) + .setMinimumShouldMatchField("m_s_m"); + + QueryShardContext context = createShardContext(); + List termQueries = queryBuilder.createTermQueries(context); + assertEquals(randomTerms.size(), termQueries.size()); + + String expectedFieldName = 
expectedFieldName(queryBuilder.getFieldName()); + for (int i = 0; i < randomTerms.size(); i++) { + Term term = new Term(expectedFieldName, randomTerms.get(i)); + assertThat(termQueries.get(i), equalTo(new TermQuery(term))); + } + } + private static List randomValues(final String fieldName) { final int numValues = randomIntBetween(0, 10); final List values = new ArrayList<>(numValues); diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index 7da423de25b..48f43eefeb3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -60,21 +60,22 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase 0); QueryShardContext context = createShardContext(); String index = context.getFullyQualifiedIndexName(); - + Query query = new WildcardQueryBuilder("_index", index).doToQuery(context); assertThat(query instanceof MatchAllDocsQuery, equalTo(true)); - + query = new WildcardQueryBuilder("_index", index + "*").doToQuery(context); assertThat(query instanceof MatchAllDocsQuery, equalTo(true)); - + query = new WildcardQueryBuilder("_index", "index_" + index + "*").doToQuery(context); assertThat(query instanceof MatchNoDocsQuery, equalTo(true)); } diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index 3de02f68318..6102a1b55f1 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -38,11 +38,12 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -86,22 +87,21 @@ public class SimilarityTests extends ESSingleNodeTestCase { } public void testResolveSimilaritiesFromMapping_classic() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "classic") .put("index.similarity.my_similarity.discount_overlaps", false) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0) // otherwise classic is forbidden .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(ClassicSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(ClassicSimilarity.class)); - ClassicSimilarity similarity = (ClassicSimilarity) 
documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + ClassicSimilarity similarity = (ClassicSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } @@ -117,11 +117,11 @@ public class SimilarityTests extends ESSingleNodeTestCase { } public void testResolveSimilaritiesFromMapping_bm25() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "BM25") @@ -129,37 +129,32 @@ public class SimilarityTests extends ESSingleNodeTestCase { .put("index.similarity.my_similarity.b", 0.5f) .put("index.similarity.my_similarity.discount_overlaps", false) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(BM25Similarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(BM25Similarity.class)); - BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + BM25Similarity similarity = (BM25Similarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getK1(), equalTo(2.0f)); assertThat(similarity.getB(), equalTo(0.5f)); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } 
public void testResolveSimilaritiesFromMapping_boolean() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "boolean").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - IndexService indexService = createIndex("foo", Settings.EMPTY); - DocumentMapper documentMapper = indexService.mapperService() - .documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), - instanceOf(BooleanSimilarity.class)); + MapperService mapperService = createIndex("foo", Settings.EMPTY, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(BooleanSimilarity.class)); } public void testResolveSimilaritiesFromMapping_DFR() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "DFR") @@ -168,11 +163,10 @@ public class SimilarityTests extends ESSingleNodeTestCase { .put("index.similarity.my_similarity.normalization", "h2") .put("index.similarity.my_similarity.normalization.h2.c", 3f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - 
assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(DFRSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(DFRSimilarity.class)); - DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + DFRSimilarity similarity = (DFRSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getBasicModel(), instanceOf(BasicModelG.class)); assertThat(similarity.getAfterEffect(), instanceOf(AfterEffectL.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -180,11 +174,11 @@ public class SimilarityTests extends ESSingleNodeTestCase { } public void testResolveSimilaritiesFromMapping_IB() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "IB") @@ -193,11 +187,10 @@ public class SimilarityTests extends ESSingleNodeTestCase { .put("index.similarity.my_similarity.normalization", "h2") .put("index.similarity.my_similarity.normalization.h2.c", 3f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(IBSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, 
"type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(IBSimilarity.class)); - IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + IBSimilarity similarity = (IBSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getDistribution(), instanceOf(DistributionSPL.class)); assertThat(similarity.getLambda(), instanceOf(LambdaTTF.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -205,59 +198,58 @@ public class SimilarityTests extends ESSingleNodeTestCase { } public void testResolveSimilaritiesFromMapping_DFI() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "DFI") .put("index.similarity.my_similarity.independence_measure", "chisquared") .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - MappedFieldType fieldType = documentMapper.mappers().getMapper("field1").fieldType(); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + MappedFieldType fieldType = mapperService.fullName("field1"); + assertThat(fieldType.similarity().get(), instanceOf(DFISimilarity.class)); DFISimilarity similarity = (DFISimilarity) fieldType.similarity().get(); assertThat(similarity.getIndependence(), instanceOf(IndependenceChiSquared.class)); } public void 
testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() .put("index.similarity.my_similarity.type", "LMDirichlet") .put("index.similarity.my_similarity.mu", 3000f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(LMDirichletSimilarity.class)); - LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(LMDirichletSimilarity.class)); + + LMDirichletSimilarity similarity = (LMDirichletSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getMu(), equalTo(3000f)); } public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("field1").field("type", "text").field("similarity", "my_similarity").endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); Settings indexSettings = Settings.builder() 
.put("index.similarity.my_similarity.type", "LMJelinekMercer") .put("index.similarity.my_similarity.lambda", 0.7f) .build(); - IndexService indexService = createIndex("foo", indexSettings); - DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().get(), instanceOf(LMJelinekMercerSimilarity.class)); + MapperService mapperService = createIndex("foo", indexSettings, "type", mapping).mapperService(); + assertThat(mapperService.fullName("field1").similarity().get(), instanceOf(LMJelinekMercerSimilarity.class)); - LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); + LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) mapperService.fullName("field1").similarity().get(); assertThat(similarity.getLambda(), equalTo(0.7f)); } diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index 2ba943ba0dc..788777ade7d 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.mapping; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -68,10 +69,26 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { } private XContentBuilder getMappingForType(String type) throws IOException { - return 
jsonBuilder().startObject().startObject(type).startObject("properties") - .startObject("field1").field("type", "text").endObject() - .startObject("obj").startObject("properties").startObject("subfield").field("type", "keyword").endObject().endObject().endObject() - .endObject().endObject().endObject(); + return jsonBuilder().startObject() + .startObject(type) + .startObject("properties") + .startObject("field1") + .field("type", "text") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("obj") + .startObject("properties") + .startObject("subfield") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); } public void testGetFieldMappings() throws Exception { @@ -138,8 +155,23 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { assertThat((Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", Boolean.TRUE)); assertThat((Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "text")); assertThat((Map) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", (Object) "keyword")); + } + @SuppressWarnings("unchecked") + public void testGetFieldMappingsWithFieldAlias() throws Exception { + assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); + GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings() + .setFields("alias", "field1").get(); + + FieldMappingMetaData aliasMapping = response.fieldMappings("test", "type", "alias"); + assertThat(aliasMapping.fullName(), equalTo("alias")); + assertThat(aliasMapping.sourceAsMap(), hasKey("alias")); + assertThat((Map) aliasMapping.sourceAsMap().get("alias"), hasEntry("type", "alias")); + + FieldMappingMetaData field1Mapping = response.fieldMappings("test", "type", "field1"); + 
assertThat(field1Mapping.fullName(), equalTo("field1")); + assertThat(field1Mapping.sourceAsMap(), hasKey("field1")); } //fix #6552 diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index c2a2405098d..edc29b0d2c5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -116,6 +116,21 @@ public class RangeIT extends ESIntegTestCase { .field(SINGLE_VALUED_FIELD_NAME, i * 2 - 1) .endObject())); } + + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. + prepareCreate("old_index") + .addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance") + .get(); + prepareCreate("new_index") + .addMapping("_doc", "route_length_miles", "type=double") + .get(); + + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + builders.add(client().prepareIndex("new_index", "_doc").setSource(Collections.emptyMap())); + indexRandom(true, builders); ensureSearchable(); } @@ -972,4 +987,72 @@ public class RangeIT extends ESIntegTestCase { assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } + + public void testFieldAlias() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(range("range") + .field("route_length_miles") + .addUnboundedTo(50.0) + .addRange(50.0, 150.0) + .addUnboundedFrom(150.0)) + .execute().actionGet(); + + assertSearchResponse(response); + + Range range = 
response.getAggregations().get("range"); + assertThat(range, notNullValue()); + assertThat(range.getName(), equalTo("range")); + List buckets = range.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + Range.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("*-50.0")); + assertThat(bucket.getDocCount(), equalTo(1L)); + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("50.0-150.0")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("150.0-*")); + assertThat(bucket.getDocCount(), equalTo(0L)); + } + + + public void testFieldAliasWithMissingValue() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(range("range") + .field("route_length_miles") + .missing(0.0) + .addUnboundedTo(50.0) + .addRange(50.0, 150.0) + .addUnboundedFrom(150.0)) + .execute().actionGet(); + + assertSearchResponse(response); + + Range range = response.getAggregations().get("range"); + assertThat(range, notNullValue()); + assertThat(range.getName(), equalTo("range")); + List buckets = range.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + Range.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("*-50.0")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("50.0-150.0")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("150.0-*")); + assertThat(bucket.getDocCount(), equalTo(0L)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index aaf366c7c7b..4a69f9d5379 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -66,6 +66,10 @@ public class ReverseNestedIT extends ESIntegTestCase { "type", jsonBuilder().startObject().startObject("properties") .startObject("field1").field("type", "keyword").endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "field1") + .endObject() .startObject("nested1").field("type", "nested").startObject("properties") .startObject("field2").field("type", "keyword").endObject() .endObject().endObject() @@ -649,4 +653,28 @@ public class ReverseNestedIT extends ESIntegTestCase { assertThat(barCount.getValue(), equalTo(2L)); } } + + public void testFieldAlias() { + SearchResponse response = client().prepareSearch("idx1") + .addAggregation(nested("nested1", "nested1") + .subAggregation( + terms("field2").field("nested1.field2") + .subAggregation( + reverseNested("nested1_to_field1") + .subAggregation( + terms("field1").field("alias") + .collectMode(randomFrom(SubAggCollectionMode.values())))))).get(); + + assertSearchResponse(response); + + Nested nested = response.getAggregations().get("nested1"); + Terms nestedTerms = nested.getAggregations().get("field2"); + Terms.Bucket bucket = nestedTerms.getBuckets().iterator().next(); + + ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1"); + Terms reverseNestedTerms = reverseNested.getAggregations().get("field1"); + + assertThat(((InternalAggregation)reverseNested).getProperty("field1"), sameInstance(reverseNestedTerms)); + assertThat(reverseNestedTerms.getBuckets().size(), equalTo(6)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 11a44a1d89b..4555809bad9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -71,8 +71,14 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import java.util.stream.DoubleStream; +import static org.elasticsearch.search.aggregations.AggregationBuilders.max; +import static org.elasticsearch.search.aggregations.AggregationBuilders.nested; + public class NestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; @@ -84,6 +90,15 @@ public class NestedAggregatorTests extends AggregatorTestCase { private final SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); + /** + * For each provided field type, we also register an alias with name -alias. + */ + @Override + protected Map getFieldAliases(MappedFieldType... 
fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { @@ -638,6 +653,49 @@ public class NestedAggregatorTests extends AggregatorTestCase { } } + public void testFieldAlias() throws IOException { + int numRootDocs = randomIntBetween(1, 20); + + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName(VALUE_FIELD_NAME); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numRootDocs; i++) { + List documents = new ArrayList<>(); + int numNestedDocs = randomIntBetween(0, 20); + generateDocuments(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME); + + Document document = new Document(); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), IdFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "test", + TypeFieldMapper.Defaults.FIELD_TYPE)); + document.add(sequenceIDFields.primaryTerm); + documents.add(document); + iw.addDocuments(documents); + } + iw.commit(); + } + + try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { + NestedAggregationBuilder agg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + max(MAX_AGG_NAME).field(VALUE_FIELD_NAME)); + + NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias")); + + Nested nested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), agg, fieldType); + Nested aliasNested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), aliasAgg, fieldType); + + assertTrue(nested.getDocCount() > 0); + assertEquals(nested, aliasNested); + } + } + } + private double generateMaxDocs(List documents, int 
numNestedDocs, int id, String path, String fieldName) { return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName)) .max().orElse(Double.NEGATIVE_INFINITY); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 36d6c6bd6e4..bc870bf4dca 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -40,7 +40,15 @@ import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.max; +import static org.elasticsearch.search.aggregations.AggregationBuilders.nested; +import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested; public class ReverseNestedAggregatorTests extends AggregatorTestCase { @@ -50,6 +58,15 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase { private static final String REVERSE_AGG_NAME = "reverseNestedAgg"; private static final String MAX_AGG_NAME = "maxAgg"; + /** + * For each provided field type, we also register an alias with name -alias. + */ + @Override + protected Map getFieldAliases(MappedFieldType... 
fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { @@ -150,4 +167,63 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase { } } + public void testFieldAlias() throws IOException { + int numParentDocs = randomIntBetween(1, 20); + + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName(VALUE_FIELD_NAME); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < numParentDocs; i++) { + List documents = new ArrayList<>(); + int numNestedDocs = randomIntBetween(0, 20); + for (int nested = 0; nested < numNestedDocs; nested++) { + Document document = new Document(); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), + IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "__" + NESTED_OBJECT, + TypeFieldMapper.Defaults.FIELD_TYPE)); + documents.add(document); + } + Document document = new Document(); + document.add(new Field(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(i)), + IdFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(TypeFieldMapper.NAME, "test", + TypeFieldMapper.Defaults.FIELD_TYPE)); + + long value = randomNonNegativeLong() % 10000; + document.add(new SortedNumericDocValuesField(VALUE_FIELD_NAME, value)); + document.add(SeqNoFieldMapper.SequenceIDFields.emptySeqID().primaryTerm); + documents.add(document); + iw.addDocuments(documents); + } + iw.commit(); + } + + try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) { + + MaxAggregationBuilder maxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME); + MaxAggregationBuilder aliasMaxAgg = max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias"); + + NestedAggregationBuilder agg = 
nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + reverseNested(REVERSE_AGG_NAME).subAggregation(maxAgg)); + NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( + reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg)); + + Nested nested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), agg, fieldType); + Nested aliasNested = search(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), aliasAgg, fieldType); + + ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME); + ReverseNested aliasReverseNested = aliasNested.getAggregations().get(REVERSE_AGG_NAME); + + assertTrue(reverseNested.getDocCount() > 0); + assertEquals(reverseNested, aliasReverseNested); + } + } + } + } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java index 8515f0a8994..70f9667ce7b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java @@ -55,7 +55,13 @@ import org.hamcrest.Matchers; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; public class SignificantTermsAggregatorTests extends AggregatorTestCase { @@ -70,6 +76,16 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { fieldType.setName("field"); } + /** + * For each provided field type, we also register an alias with name -alias. + */ + @Override + protected Map getFieldAliases(MappedFieldType... 
fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } + public void testParsedAsFilter() throws IOException { IndexReader indexReader = new MultiReader(); IndexSearcher indexSearcher = newSearcher(indexReader); @@ -104,7 +120,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); indexWriterConfig.setMaxBufferedDocs(100); indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment - + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { addMixedTextDocs(textFieldType, w); @@ -137,7 +153,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { assertNull(terms.getBucketByKey("odd")); assertNull(terms.getBucketByKey("common")); assertNotNull(terms.getBucketByKey("even")); - + // Search odd with regex includeexcludes sigAgg.includeExclude(new IncludeExclude("o.d", null)); terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); @@ -149,7 +165,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { // Search with string-based includeexcludes String oddStrings[] = new String[] {"odd", "weird"}; String evenStrings[] = new String[] {"even", "regular"}; - + sigAgg.includeExclude(new IncludeExclude(oddStrings, evenStrings)); sigAgg.significanceHeuristic(SignificanceHeuristicTests.getRandomSignificanceheuristic()); terms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), sigAgg, textFieldType); @@ -159,7 +175,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { assertNull(terms.getBucketByKey("common")); assertNull(terms.getBucketByKey("even")); assertNull(terms.getBucketByKey("regular")); - + sigAgg.includeExclude(new IncludeExclude(evenStrings, oddStrings)); terms = searchAndReduce(searcher, new TermQuery(new 
Term("text", "odd")), sigAgg, textFieldType); assertEquals(0, terms.getBuckets().size()); @@ -168,7 +184,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { assertNull(terms.getBucketByKey("common")); assertNull(terms.getBucketByKey("even")); assertNull(terms.getBucketByKey("regular")); - + } } } @@ -232,7 +248,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { } } } - + /** * Uses the significant terms aggregation on an index with unmapped field */ @@ -266,7 +282,57 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { } } - } + } + + public void testFieldAlias() throws IOException { + TextFieldType textFieldType = new TextFieldType(); + textFieldType.setName("text"); + textFieldType.setFielddata(true); + textFieldType.setIndexAnalyzer(new NamedAnalyzer("my_analyzer", AnalyzerScope.GLOBAL, new StandardAnalyzer())); + + IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); + indexWriterConfig.setMaxBufferedDocs(100); + indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment + + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + addMixedTextDocs(textFieldType, w); + + SignificantTermsAggregationBuilder agg = significantTerms("sig_text").field("text"); + SignificantTermsAggregationBuilder aliasAgg = significantTerms("sig_text").field("text-alias"); + + String executionHint = randomExecutionHint(); + agg.executionHint(executionHint); + aliasAgg.executionHint(executionHint); + + if (randomBoolean()) { + // Use a background filter which just happens to be same scope as whole-index. 
+ QueryBuilder backgroundFilter = QueryBuilders.termsQuery("text", "common"); + agg.backgroundFilter(backgroundFilter); + aliasAgg.backgroundFilter(backgroundFilter); + } + + try (IndexReader reader = DirectoryReader.open(w)) { + assertEquals("test expects a single segment", 1, reader.leaves().size()); + IndexSearcher searcher = new IndexSearcher(reader); + + SignificantTerms evenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), + agg, textFieldType); + SignificantTerms aliasEvenTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), + aliasAgg, textFieldType); + + assertFalse(evenTerms.getBuckets().isEmpty()); + assertEquals(evenTerms, aliasEvenTerms); + + SignificantTerms oddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), + agg, textFieldType); + SignificantTerms aliasOddTerms = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), + aliasAgg, textFieldType); + + assertFalse(oddTerms.getBuckets().isEmpty()); + assertEquals(oddTerms, aliasOddTerms); + } + } + } private void addMixedTextDocs(TextFieldType textFieldType, IndexWriter w) throws IOException { for (int i = 0; i < 10; i++) { @@ -284,7 +350,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { w.addDocument(doc); } - } + } private void addFields(Document doc, List createFields) { for (Field field : createFields) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java index ec15e2fc3e8..c63d5cb7d39 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java @@ -34,19 +34,34 @@ import org.apache.lucene.store.Directory; import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTextAggregationBuilder; import java.io.IOException; import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantText; public class SignificantTextAggregatorTests extends AggregatorTestCase { - - + + /** + * For each provided field type, we also register an alias with name -alias. + */ + @Override + protected Map getFieldAliases(MappedFieldType... 
fieldTypes) { + return Arrays.stream(fieldTypes).collect(Collectors.toMap( + ft -> ft.name() + "-alias", + Function.identity())); + } + /** * Uses the significant text aggregation to find the keywords in text fields */ @@ -59,22 +74,7 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase { indexWriterConfig.setMaxBufferedDocs(100); indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { - for (int i = 0; i < 10; i++) { - Document doc = new Document(); - StringBuilder text = new StringBuilder("common "); - if (i % 2 == 0) { - text.append("odd "); - } else { - text.append("even separator" + i + " duplicate duplicate duplicate duplicate duplicate duplicate "); - } - - doc.add(new Field("text", text.toString(), textFieldType)); - String json ="{ \"text\" : \"" + text.toString() + "\","+ - " \"json_only_field\" : \"" + text.toString() + "\"" + - " }"; - doc.add(new StoredField("_source", new BytesRef(json))); - w.addDocument(doc); - } + indexDocuments(w, textFieldType); SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text").filterDuplicateText(true); if(randomBoolean()){ @@ -82,37 +82,104 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase { } SamplerAggregationBuilder aggBuilder = new SamplerAggregationBuilder("sampler") .subAggregation(sigAgg); - + try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - + // Search "odd" which should have no duplication Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); SignificantTerms terms = sampler.getAggregations().get("sig_text"); - + assertNull(terms.getBucketByKey("even")); - assertNull(terms.getBucketByKey("duplicate")); - 
assertNull(terms.getBucketByKey("common")); + assertNull(terms.getBucketByKey("duplicate")); + assertNull(terms.getBucketByKey("common")); assertNotNull(terms.getBucketByKey("odd")); // Search "even" which will have duplication sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aggBuilder, textFieldType); terms = sampler.getAggregations().get("sig_text"); - + assertNull(terms.getBucketByKey("odd")); - assertNull(terms.getBucketByKey("duplicate")); - assertNull(terms.getBucketByKey("common")); + assertNull(terms.getBucketByKey("duplicate")); + assertNull(terms.getBucketByKey("common")); assertNull(terms.getBucketByKey("separator2")); assertNull(terms.getBucketByKey("separator4")); assertNull(terms.getBucketByKey("separator6")); assertNotNull(terms.getBucketByKey("even")); - + } } } - + + public void testFieldAlias() throws IOException { + TextFieldType textFieldType = new TextFieldType(); + textFieldType.setName("text"); + textFieldType.setIndexAnalyzer(new NamedAnalyzer("my_analyzer", AnalyzerScope.GLOBAL, new StandardAnalyzer())); + + IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); + indexWriterConfig.setMaxBufferedDocs(100); + indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + indexDocuments(w, textFieldType); + + SignificantTextAggregationBuilder agg = significantText("sig_text", "text") + .filterDuplicateText(true); + SignificantTextAggregationBuilder aliasAgg = significantText("sig_text", "text-alias") + .filterDuplicateText(true); + + if (randomBoolean()) { + List sourceFieldNames = Arrays.asList(new String [] {"json_only_field"}); + agg.sourceFieldNames(sourceFieldNames); + aliasAgg.sourceFieldNames(sourceFieldNames); + } + + try (IndexReader reader = DirectoryReader.open(w)) { + assertEquals("test expects a single segment", 1, reader.leaves().size()); + IndexSearcher searcher = new 
IndexSearcher(reader); + + SamplerAggregationBuilder samplerAgg = sampler("sampler").subAggregation(agg); + SamplerAggregationBuilder aliasSamplerAgg = sampler("sampler").subAggregation(aliasAgg); + + Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, textFieldType); + Sampler aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aliasSamplerAgg, textFieldType); + + SignificantTerms terms = sampler.getAggregations().get("sig_text"); + SignificantTerms aliasTerms = aliasSampler.getAggregations().get("sig_text"); + assertFalse(terms.getBuckets().isEmpty()); + assertEquals(terms, aliasTerms); + + sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), samplerAgg, textFieldType); + aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "even")), aliasSamplerAgg, textFieldType); + + terms = sampler.getAggregations().get("sig_text"); + aliasTerms = aliasSampler.getAggregations().get("sig_text"); + assertFalse(terms.getBuckets().isEmpty()); + assertEquals(terms, aliasTerms); + } + } + } + + private void indexDocuments(IndexWriter writer, TextFieldType textFieldType) throws IOException { + for (int i = 0; i < 10; i++) { + Document doc = new Document(); + StringBuilder text = new StringBuilder("common "); + if (i % 2 == 0) { + text.append("odd "); + } else { + text.append("even separator" + i + " duplicate duplicate duplicate duplicate duplicate duplicate "); + } + + doc.add(new Field("text", text.toString(), textFieldType)); + String json ="{ \"text\" : \"" + text.toString() + "\","+ + " \"json_only_field\" : \"" + text.toString() + "\"" + + " }"; + doc.add(new StoredField("_source", new BytesRef(json))); + writer.addDocument(doc); + } + } + /** * Test documents with arrays of text */ @@ -137,13 +204,13 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase { sigAgg.sourceFieldNames(Arrays.asList(new String [] {"title", "text"})); try 
(IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); - IndexSearcher searcher = new IndexSearcher(reader); + IndexSearcher searcher = new IndexSearcher(reader); searchAndReduce(searcher, new TermQuery(new Term("text", "foo")), sigAgg, textFieldType); // No significant results to be found in this test - only checking we don't end up // with the internal exception discovered in issue https://github.com/elastic/elasticsearch/issues/25029 } } } - - + + } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 4ff3c5fc5b4..d74e43327fd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -809,12 +809,12 @@ public class TermsAggregatorTests extends AggregatorTestCase { fieldType1.setHasDocValues(true); MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); - fieldType1.setName("another_long"); - fieldType1.setHasDocValues(true); + fieldType2.setName("another_long"); + fieldType2.setHasDocValues(true); MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); - fieldType1.setName("another_double"); - fieldType1.setHasDocValues(true); + fieldType3.setName("another_double"); + fieldType3.setHasDocValues(true); try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { IndexSearcher indexSearcher = newIndexSearcher(indexReader); ValueType[] valueTypes = new ValueType[]{ValueType.STRING, ValueType.LONG, ValueType.DOUBLE}; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 1b4ae466bb3..b3a5df4dbfc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -18,12 +18,7 @@ */ package org.elasticsearch.search.aggregations.metrics; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; @@ -36,6 +31,14 @@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.hamcrest.core.IsNull; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -61,6 +64,33 @@ public class SumIT extends AbstractNumericTestCase { return Collections.singleton(MetricAggScriptPlugin.class); } + @Override + public void setupSuiteScopeCluster() throws Exception { + super.setupSuiteScopeCluster(); + + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. 
+ prepareCreate("old_index") + .addMapping("_doc", + "transit_mode", "type=keyword", + "distance", "type=double", + "route_length_miles", "type=alias,path=distance") + .get(); + prepareCreate("new_index") + .addMapping("_doc", + "transit_mode", "type=keyword", + "route_length_miles", "type=double") + .get(); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "train", "distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource("transit_mode", "bus", "distance", 50.5)); + builders.add(client().prepareIndex("new_index", "_doc").setSource("transit_mode", "train", "route_length_miles", 100.2)); + + indexRandom(true, builders); + ensureSearchable(); + } + @Override public void testEmptyAggregation() throws Exception { @@ -382,4 +412,54 @@ public class SumIT extends AbstractNumericTestCase { assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() .getMissCount(), equalTo(1L)); } + + public void testFieldAlias() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(sum("sum") + .field("route_length_miles")) + .execute().actionGet(); + + assertSearchResponse(response); + + Sum sum = response.getAggregations().get("sum"); + assertThat(sum, IsNull.notNullValue()); + assertThat(sum.getName(), equalTo("sum")); + assertThat(sum.getValue(), equalTo(192.7)); + } + + public void testFieldAliasInSubAggregation() { + SearchResponse response = client().prepareSearch("old_index", "new_index") + .addAggregation(terms("terms") + .field("transit_mode") + .subAggregation(sum("sum") + .field("route_length_miles"))) + .execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + + List buckets = terms.getBuckets(); + 
assertThat(buckets.size(), equalTo(2)); + + Terms.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("train")); + assertThat(bucket.getDocCount(), equalTo(2L)); + + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.getValue(), equalTo(142.2)); + + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo("bus")); + assertThat(bucket.getDocCount(), equalTo(1L)); + + sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.getValue(), equalTo(50.5)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index b2a949ceeee..5124503fc03 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -26,10 +26,8 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; @@ -344,8 +342,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase { * is final and cannot be mocked */ @Override - protected QueryShardContext queryShardContextMock(MapperService mapperService, final MappedFieldType[] fieldTypes, - 
CircuitBreakerService circuitBreakerService) { + protected QueryShardContext queryShardContextMock(MapperService mapperService) { MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS); Map engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java index ae65bc9f32c..b213ca785e2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -258,4 +258,27 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { assertEquals(1, values.nextValue()); } } + + + public void testFieldAlias() throws Exception { + IndexService indexService = createIndex("index", Settings.EMPTY, "type", + "field", "type=keyword", "alias", "type=alias,path=field"); + client().prepareIndex("index", "type", "1") + .setSource("field", "value") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + try (Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { + QueryShardContext context = indexService.newQueryShardContext(0, searcher.reader(), () -> 42L, null); + ValuesSourceConfig config = ValuesSourceConfig.resolve( + context, ValueType.STRING, "alias", null, null, null, null); + ValuesSource.Bytes valuesSource = config.toValuesSource(context); + + LeafReaderContext ctx = searcher.reader().leaves().get(0); + SortedBinaryDocValues values = valuesSource.bytesValues(ctx); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(new BytesRef("value"), values.nextValue()); + } + } } diff --git 
a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index e5af22cd2ae..069c72c10b4 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -171,6 +171,106 @@ public class HighlighterSearchIT extends ESIntegTestCase { } } + public void testFieldAlias() throws IOException { + XContentBuilder mappings = jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("store", true) + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mappings)); + + client().prepareIndex("test", "type", "1").setSource("text", "foo").get(); + refresh(); + + for (String type : ALL_TYPES) { + HighlightBuilder builder = new HighlightBuilder() + .field(new Field("alias").highlighterType(type)) + .requireFieldMatch(randomBoolean()); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("alias", "foo")) + .highlighter(builder) + .get(); + assertHighlight(search, 0, "alias", 0, equalTo("foo")); + } + } + + public void testFieldAliasWithSourceLookup() throws IOException { + XContentBuilder mappings = jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("analyzer", "whitespace") + .field("store", false) + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + 
assertAcked(prepareCreate("test").addMapping("type", mappings)); + + client().prepareIndex("test", "type", "1").setSource("text", "foo bar").get(); + refresh(); + + for (String type : ALL_TYPES) { + HighlightBuilder builder = new HighlightBuilder() + .field(new Field("alias").highlighterType(type)) + .requireFieldMatch(randomBoolean()); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("alias", "bar")) + .highlighter(builder) + .get(); + assertHighlight(search, 0, "alias", 0, equalTo("foo bar")); + } + } + + public void testFieldAliasWithWildcardField() throws IOException { + XContentBuilder mappings = jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("keyword") + .field("type", "keyword") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "keyword") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mappings)); + + client().prepareIndex("test", "type", "1").setSource("keyword", "foo").get(); + refresh(); + + HighlightBuilder builder = new HighlightBuilder() + .field(new Field("al*")) + .requireFieldMatch(false); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("alias", "foo")) + .highlighter(builder) + .get(); + assertHighlight(search, 0, "alias", 0, equalTo("foo")); + } + + public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); diff --git a/server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java new file mode 100644 index 00000000000..8440357758e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -0,0 +1,151 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.fieldcaps; + +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class FieldCapabilitiesIT extends ESIntegTestCase { + + @Before + public void setUp() throws Exception { + super.setUp(); + + XContentBuilder oldIndexMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("distance") + .field("type", "double") + .endObject() + .startObject("route_length_miles") + .field("type", "alias") + .field("path", "distance") + .endObject() + .startObject("playlist") + .field("type", "text") + .endObject() + .startObject("secret_soundtrack") + .field("type", "alias") 
+ .field("path", "playlist") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("old_index").addMapping("_doc", oldIndexMapping)); + + XContentBuilder newIndexMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("distance") + .field("type", "text") + .endObject() + .startObject("route_length_miles") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("new_index").addMapping("_doc", newIndexMapping)); + } + + public static class FieldFilterPlugin extends Plugin implements MapperPlugin { + @Override + public Function> getFieldFilter() { + return index -> field -> !field.equals("playlist"); + } + } + + @Override + protected Collection> nodePlugins() { + return Collections.singleton(FieldFilterPlugin.class); + } + + public void testFieldAlias() { + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields("distance", "route_length_miles") + .execute().actionGet(); + + // Ensure the response has entries for both requested fields. + assertTrue(response.get().containsKey("distance")); + assertTrue(response.get().containsKey("route_length_miles")); + + // Check the capabilities for the 'distance' field. + Map distance = response.getField("distance"); + assertEquals(2, distance.size()); + + assertTrue(distance.containsKey("double")); + assertEquals( + new FieldCapabilities("distance", "double", true, true, new String[] {"old_index"}, null, null), + distance.get("double")); + + assertTrue(distance.containsKey("text")); + assertEquals( + new FieldCapabilities("distance", "text", true, false, new String[] {"new_index"}, null, null), + distance.get("text")); + + // Check the capabilities for the 'route_length_miles' alias. 
+ Map routeLength = response.getField("route_length_miles"); + assertEquals(1, routeLength.size()); + + assertTrue(routeLength.containsKey("double")); + assertEquals( + new FieldCapabilities("route_length_miles", "double", true, true), + routeLength.get("double")); + } + + public void testFieldAliasWithWildcard() { + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields("route*") + .execute().actionGet(); + + assertEquals(1, response.get().size()); + assertTrue(response.get().containsKey("route_length_miles")); + } + + public void testFieldAliasFiltering() { + FieldCapabilitiesResponse response = client().prepareFieldCaps().setFields( + "secret-soundtrack", "route_length_miles") + .execute().actionGet(); + assertEquals(1, response.get().size()); + assertTrue(response.get().containsKey("route_length_miles")); + } + + public void testFieldAliasFilteringWithWildcard() { + FieldCapabilitiesResponse response = client().prepareFieldCaps() + .setFields("distance", "secret*") + .execute().actionGet(); + assertEquals(1, response.get().size()); + assertTrue(response.get().containsKey("distance")); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 2e9426bbe68..2126e0e94eb 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -46,7 +47,12 @@ import 
org.elasticsearch.search.lookup.FieldLookup; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.ReadableDateTime; import org.joda.time.base.BaseDateTime; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; import java.time.ZoneOffset; import java.time.ZonedDateTime; @@ -913,6 +919,163 @@ public class SearchFieldsIT extends ESIntegTestCase { } } + public void testDocValueFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + .field("fielddata", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("format", "yyyy-MM-dd") + .endObject() + .startObject("text_field_alias") + .field("type", "alias") + .field("path", "text_field") + .endObject() + .startObject("date_field_alias") + .field("type", "alias") + .field("path", "date_field") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + ensureGreen("test"); + + DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); + DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd"); + + index("test", "type", "1", "text_field", "foo", "date_field", formatter.print(date)); + refresh("test"); + + SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) + .addDocValueField("text_field_alias") + .addDocValueField("date_field_alias", "use_field_mapping") + .addDocValueField("date_field"); + SearchResponse searchResponse = builder.execute().actionGet(); + + assertNoFailures(searchResponse); + assertHitCount(searchResponse, 
1); + SearchHit hit = searchResponse.getHits().getAt(0); + + Map fields = hit.getFields(); + assertThat(fields.keySet(), equalTo(newHashSet("text_field_alias", "date_field_alias", "date_field"))); + + DocumentField textFieldAlias = fields.get("text_field_alias"); + assertThat(textFieldAlias.getName(), equalTo("text_field_alias")); + assertThat(textFieldAlias.getValue(), equalTo("foo")); + + DocumentField dateFieldAlias = fields.get("date_field_alias"); + assertThat(dateFieldAlias.getName(), equalTo("date_field_alias")); + assertThat(dateFieldAlias.getValue(), + equalTo("1990-12-29")); + + DocumentField dateField = fields.get("date_field"); + assertThat(dateField.getName(), equalTo("date_field")); + + ReadableDateTime fetchedDate = dateField.getValue(); + assertThat(fetchedDate, equalTo(date)); + } + + + public void testStoredFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .startObject("field1-alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("field2-alias") + .field("type", "alias") + .field("path", "field2") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + index("test", "type", "1", "field1", "value1", "field2", "value2"); + refresh("test"); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addStoredField("field1-alias") + .addStoredField("field2-alias") + .get(); + assertHitCount(searchResponse, 1L); + + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals(1, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("field1-alias")); + + DocumentField field = 
hit.getFields().get("field1-alias"); + assertThat(field.getValue().toString(), equalTo("value1")); + } + + public void testWildcardStoredFieldsWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .startObject("field1-alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("field2-alias") + .field("type", "alias") + .field("path", "field2") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + index("test", "type", "1", "field1", "value1", "field2", "value2"); + refresh("test"); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addStoredField("field*") + .get(); + assertHitCount(searchResponse, 1L); + + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals(2, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("field1")); + assertTrue(hit.getFields().containsKey("field1-alias")); + + DocumentField field = hit.getFields().get("field1"); + assertThat(field.getValue().toString(), equalTo("value1")); + + DocumentField fieldAlias = hit.getFields().get("field1-alias"); + assertThat(fieldAlias.getValue().toString(), equalTo("value1")); + } + public void testLoadMetadata() throws Exception { assertAcked(prepareCreate("test")); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java index 7906165b090..2ff7d0c1383 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -24,8 +24,6 @@ import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; @@ -58,10 +56,10 @@ public class GeoPolygonIT extends ESIntegTestCase { Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location").field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + + assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", "location", + "type=geo_point", "alias", + "type=alias,path=location")); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() @@ -132,4 +130,17 @@ public class GeoPolygonIT extends ESIntegTestCase { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); } } + + public void testFieldAlias() { + List points = new ArrayList<>(); + points.add(new GeoPoint(40.7, -74.0)); + points.add(new GeoPoint(40.7, -74.1)); + points.add(new GeoPoint(40.8, -74.1)); + points.add(new GeoPoint(40.8, -74.0)); + points.add(new GeoPoint(40.7, -74.0)); + SearchResponse searchResponse = client().prepareSearch("test") // from NY + .setQuery(boolQuery().must(geoPolygonQuery("alias", points))) + .execute().actionGet(); + assertHitCount(searchResponse, 4); + } } diff --git 
a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index d3a31f12c57..6f204796e41 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,7 +19,10 @@ package org.elasticsearch.search.geo; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; @@ -27,20 +30,16 @@ import org.elasticsearch.common.geo.builders.LineStringBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.locationtech.spatial4j.shape.Rectangle; -import org.locationtech.jts.geom.Coordinate; - -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.spatial4j.shape.Rectangle; import java.io.IOException; import java.util.Locale; @@ -503,4 +502,33 @@ 
public class GeoShapeQueryTests extends ESSingleNodeTestCase { assertEquals(2, response.getHits().getTotalHits()); } + + public void testFieldAlias() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("tree", randomBoolean() ? "quadtree" : "geohash") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "location") + .endObject() + .endObject() + .endObject() + .endObject(); + + createIndex("test", Settings.EMPTY, "type", mapping); + + ShapeBuilder shape = RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.MULTIPOINT); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("location", shape).endObject()) + .setRefreshPolicy(IMMEDIATE).get(); + + SearchResponse response = client().prepareSearch("test") + .setQuery(geoShapeQuery("alias", shape)) + .execute().actionGet(); + assertEquals(1, response.getHits().getTotalHits()); + } } diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java new file mode 100644 index 00000000000..dfdbef1c3d5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafDocLookupTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.lookup; + +import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import static org.mockito.AdditionalAnswers.returnsFirstArg; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class LeafDocLookupTests extends ESTestCase { + private ScriptDocValues docValues; + private LeafDocLookup docLookup; + + @Before + public void setUp() throws Exception { + super.setUp(); + + MappedFieldType fieldType = mock(MappedFieldType.class); + when(fieldType.name()).thenReturn("field"); + when(fieldType.valueForDisplay(anyObject())).then(returnsFirstArg()); + + MapperService mapperService = mock(MapperService.class); + when(mapperService.fullName("field")).thenReturn(fieldType); + when(mapperService.fullName("alias")).thenReturn(fieldType); + + docValues = mock(ScriptDocValues.class); + + AtomicFieldData atomicFieldData = mock(AtomicFieldData.class); + doReturn(docValues).when(atomicFieldData).getScriptValues(); + + IndexFieldData fieldData = mock(IndexFieldData.class); + when(fieldData.getFieldName()).thenReturn("field"); + doReturn(atomicFieldData).when(fieldData).load(anyObject()); + + docLookup 
= new LeafDocLookup(mapperService, + ignored -> fieldData, + new String[] { "type" }, + null); + } + + public void testBasicLookup() { + ScriptDocValues fetchedDocValues = docLookup.get("field"); + assertEquals(docValues, fetchedDocValues); + } + + public void testLookupWithFieldAlias() { + ScriptDocValues fetchedDocValues = docLookup.get("alias"); + assertEquals(docValues, fetchedDocValues); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java new file mode 100644 index 00000000000..1195893a28a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.lookup; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.StoredFieldVisitor; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Collections; +import java.util.List; + +import static org.mockito.AdditionalAnswers.returnsFirstArg; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class LeafFieldsLookupTests extends ESTestCase { + private LeafFieldsLookup fieldsLookup; + + @Before + public void setUp() throws Exception { + super.setUp(); + + MappedFieldType fieldType = mock(MappedFieldType.class); + when(fieldType.name()).thenReturn("field"); + when(fieldType.valueForDisplay(anyObject())).then(returnsFirstArg()); + + MapperService mapperService = mock(MapperService.class); + when(mapperService.fullName("field")).thenReturn(fieldType); + when(mapperService.fullName("alias")).thenReturn(fieldType); + + FieldInfo mockFieldInfo = new FieldInfo("field", 1, false, false, true, + IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(), 0, 0, false); + + LeafReader leafReader = mock(LeafReader.class); + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + StoredFieldVisitor visitor = (StoredFieldVisitor) args[1]; + visitor.doubleField(mockFieldInfo, 2.718); + return null; + }).when(leafReader).document(anyInt(), any(StoredFieldVisitor.class)); + + fieldsLookup = new LeafFieldsLookup(mapperService, + new String[] { "type" }, + leafReader); + } + + public void testBasicLookup() { + 
FieldLookup fieldLookup = (FieldLookup) fieldsLookup.get("field"); + assertEquals("field", fieldLookup.fieldType().name()); + + List values = fieldLookup.getValues(); + assertNotNull(values); + assertEquals(1, values.size()); + assertEquals(2.718, values.get(0)); + } + + public void testLookupWithFieldAlias() { + FieldLookup fieldLookup = (FieldLookup) fieldsLookup.get("alias"); + assertEquals("field", fieldLookup.fieldType().name()); + + List values = fieldLookup.getValues(); + assertNotNull(values); + assertEquals(1, values.size()); + assertEquals(2.718, values.get(0)); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 81f48aac074..dedd0f03664 100644 --- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -337,6 +338,36 @@ public class MoreLikeThisIT extends ESIntegTestCase { assertHitCount(searchResponse, 0L); } + public void testMoreLikeThisWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("text") + .field("type", "text") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + + assertAcked(prepareCreate("test").addMapping("_doc", mapping)); + 
ensureGreen(); + + index("test", "_doc", "1", "text", "lucene"); + index("test", "_doc", "2", "text", "lucene release"); + refresh(); + + Item item = new Item("test", "_doc", "1"); + QueryBuilder query = QueryBuilders.moreLikeThisQuery(new String[] {"alias"}, null, new Item[] {item}) + .minTermFreq(1) + .minDocFreq(1); + SearchResponse response = client().prepareSearch().setQuery(query).get(); + assertHitCount(response, 1L); + } + public void testSimpleMoreLikeInclude() throws Exception { logger.info("Creating index test"); assertAcked(prepareCreate("test").addMapping("type1", diff --git a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java index 6e4a1b7d618..45910044d49 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; @@ -141,4 +142,89 @@ public class ExistsIT extends ESIntegTestCase { } } } + + public void testFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("bar") + .field("type", "long") + .endObject() + .startObject("foo") + .field("type", "object") + .startObject("properties") + .startObject("bar") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .startObject("foo-bar") + .field("type", "alias") + .field("path", "foo.bar") + .endObject() + .endObject() + .endObject() + .endObject(); + 
assertAcked(prepareCreate("idx").addMapping("type", mapping)); + ensureGreen("idx"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx", "type").setSource("bar", 3)); + indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 2.718))); + indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", singletonMap("bar", 6.283))); + indexRandom(true, false, indexRequests); + + Map expected = new LinkedHashMap<>(); + expected.put("foo.bar", 2); + expected.put("foo-bar", 2); + expected.put("foo*", 2); + expected.put("*bar", 3); + + for (Map.Entry entry : expected.entrySet()) { + String fieldName = entry.getKey(); + int expectedCount = entry.getValue(); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(QueryBuilders.existsQuery(fieldName)) + .get(); + assertSearchResponse(response); + assertHitCount(response, expectedCount); + } + } + + public void testFieldAliasWithNoDocValues() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", "long") + .field("doc_values", false) + .endObject() + .startObject("foo-alias") + .field("type", "alias") + .field("path", "foo") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("idx").addMapping("type", mapping)); + ensureGreen("idx"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx", "type").setSource(emptyMap())); + indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 3)); + indexRequests.add(client().prepareIndex("idx", "type").setSource("foo", 43)); + 
indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(QueryBuilders.existsQuery("foo-alias")) + .get(); + assertSearchResponse(response); + assertHitCount(response, 2); + } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java index 7145f9db2db..5caab8c9dfe 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryStringIT.java @@ -51,6 +51,7 @@ import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -376,6 +377,70 @@ public class QueryStringIT extends ESIntegTestCase { containsString("field expansion matches too many fields, limit: 1024, got: 1025")); } + public void testFieldAlias() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("value").field("f3_alias")) + .execute().actionGet(); + + assertNoFailures(response); + 
assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + public void testFieldAliasWithEmbeddedFieldNames() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("f3_alias:value AND f2:three")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 1); + assertHits(response.getHits(), "3"); + } + + public void testFieldAliasWithWildcardField() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("value").field("f3_*")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + public void testFieldAliasOnDisallowedFieldType() throws Exception { + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRandom(true, false, indexRequests); + + // The wildcard field matches aliases for both a text and boolean field. + // By default, the boolean field should be ignored when building the query. 
+ SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("text").field("f*_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 1); + assertHits(response.getHits(), "1"); + } + private void assertHits(SearchHits hits, String... ids) { assertThat(hits.getTotalHits(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 1694f86c53e..860c3e074f3 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.query; +import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.English; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -26,13 +27,16 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; @@ -1858,4 
+1862,78 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(searchResponse, 1); } + public void testNestedQueryWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("_doc") + .startObject("properties") + .startObject("section") + .field("type", "nested") + .startObject("properties") + .startObject("distance") + .field("type", "long") + .endObject() + .startObject("route_length_miles") + .field("type", "alias") + .field("path", "section.distance") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("index").addMapping("_doc", mapping)); + + XContentBuilder source = XContentFactory.jsonBuilder().startObject() + .startObject("section") + .field("distance", 42) + .endObject() + .endObject(); + + index("index", "_doc", "1", source); + refresh(); + + QueryBuilder nestedQuery = QueryBuilders.nestedQuery("section", + QueryBuilders.termQuery("section.route_length_miles", 42), + ScoreMode.Max); + SearchResponse searchResponse = client().prepareSearch("index").setQuery(nestedQuery).get(); + assertHitCount(searchResponse, 1); + } + + public void testFieldAliasesForMetaFields() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("id-alias") + .field("type", "alias") + .field("path", "_id") + .endObject() + .startObject("routing-alias") + .field("type", "alias") + .field("path", "_routing") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + IndexRequestBuilder indexRequest = client().prepareIndex("test", "type") + .setId("1") + .setRouting("custom") + .setSource("field", "value"); + indexRandom(true, false, indexRequest); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(termQuery("routing-alias", "custom")) + 
.addDocValueField("id-alias") + .get(); + assertHitCount(searchResponse, 1L); + + SearchHit hit = searchResponse.getHits().getAt(0); + assertEquals(2, hit.getFields().size()); + assertTrue(hit.getFields().containsKey("id-alias")); + + DocumentField field = hit.getFields().get("id-alias"); + assertThat(field.getValue().toString(), equalTo("1")); + } } diff --git a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index d7db62b6143..5176c327ac7 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -58,6 +58,7 @@ import java.util.concurrent.ExecutionException; import static java.util.Collections.singletonList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; @@ -585,6 +586,67 @@ public class SimpleQueryStringIT extends ESIntegTestCase { containsString("field expansion matches too many fields, limit: 1024, got: 1025")); } + public void testFieldAlias() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); + ensureGreen("test"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + 
indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("value").field("f3_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + public void testFieldAliasWithWildcardField() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); + ensureGreen("test"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRequests.add(client().prepareIndex("test", "_doc", "2").setSource("f3", "value", "f2", "two")); + indexRequests.add(client().prepareIndex("test", "_doc", "3").setSource("f3", "another value", "f2", "three")); + indexRandom(true, false, indexRequests); + + SearchResponse response = client().prepareSearch("test") + .setQuery(simpleQueryStringQuery("value").field("f3_*")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 2); + assertHits(response.getHits(), "2", "3"); + } + + + public void testFieldAliasOnDisallowedFieldType() throws Exception { + String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json"); + assertAcked(prepareCreate("test").setSource(indexBody, XContentType.JSON)); + ensureGreen("test"); + + List indexRequests = new ArrayList<>(); + indexRequests.add(client().prepareIndex("test", "_doc", "1").setSource("f3", "text", "f2", "one")); + indexRandom(true, false, indexRequests); + + // The wildcard field matches aliases for both a text and boolean field. + // By default, the boolean field should be ignored when building the query. 
+ SearchResponse response = client().prepareSearch("test") + .setQuery(queryStringQuery("text").field("f*_alias")) + .execute().actionGet(); + + assertNoFailures(response); + assertHitCount(response, 1); + assertHits(response.getHits(), "1"); + } + private void assertHits(SearchHits hits, String... ids) { assertThat(hits.getTotalHits(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 3af370326f5..ff0196aacdf 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -40,6 +40,7 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matchers; @@ -1572,4 +1573,60 @@ public class FieldSortIT extends ESIntegTestCase { } } } + + public void testFieldAlias() throws Exception { + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. 
+ assertAcked(prepareCreate("old_index") + .addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance")); + assertAcked(prepareCreate("new_index") + .addMapping("_doc", "route_length_miles", "type=double")); + ensureGreen("old_index", "new_index"); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 50.5)); + builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + indexRandom(true, true, builders); + + SearchResponse response = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(builders.size()) + .addSort(SortBuilders.fieldSort("route_length_miles")) + .execute().actionGet(); + SearchHits hits = response.getHits(); + + assertEquals(3, hits.getHits().length); + assertEquals(42.0, hits.getAt(0).getSortValues()[0]); + assertEquals(50.5, hits.getAt(1).getSortValues()[0]); + assertEquals(100.2, hits.getAt(2).getSortValues()[0]); + } + + public void testFieldAliasesWithMissingValues() throws Exception { + // Create two indices and add the field 'route_length_miles' as an alias in + // one, and a concrete field in the other. 
+ assertAcked(prepareCreate("old_index") + .addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance")); + assertAcked(prepareCreate("new_index") + .addMapping("_doc", "route_length_miles", "type=double")); + ensureGreen("old_index", "new_index"); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("old_index", "_doc").setSource("distance", 42.0)); + builders.add(client().prepareIndex("old_index", "_doc").setSource(Collections.emptyMap())); + builders.add(client().prepareIndex("new_index", "_doc").setSource("route_length_miles", 100.2)); + indexRandom(true, true, builders); + + SearchResponse response = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(builders.size()) + .addSort(SortBuilders.fieldSort("route_length_miles").missing(120.3)) + .execute().actionGet(); + SearchHits hits = response.getHits(); + + assertEquals(3, hits.getHits().length); + assertEquals(42.0, hits.getAt(0).getSortValues()[0]); + assertEquals(100.2, hits.getAt(1).getSortValues()[0]); + assertEquals(120.3, hits.getAt(2).getSortValues()[0]); + } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 00a287f0252..9d1f01fe328 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -160,7 +160,7 @@ public abstract class AbstractSuggestionBuilderTestCase builders = new ArrayList<>(); + builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "apple")); + builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "mango")); + builders.add(client().prepareIndex(INDEX, TYPE).setSource(FIELD, "papaya")); + indexRandom(true, false, builders); + + CompletionSuggestionBuilder suggestionBuilder = 
SuggestBuilders.completionSuggestion("alias").text("app"); + assertSuggestions("suggestion", suggestionBuilder, "apple"); + } public static boolean isReservedChar(char c) { switch (c) { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 677cc4163cc..aaeaadd4c9f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -979,6 +979,35 @@ public class SuggestSearchIT extends ESIntegTestCase { // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging } + public void testSuggestWithFieldAlias() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "keyword") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(prepareCreate("test").addMapping("type", mapping)); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("test", "type").setSource("text", "apple")); + builders.add(client().prepareIndex("test", "type").setSource("text", "mango")); + builders.add(client().prepareIndex("test", "type").setSource("text", "papaya")); + indexRandom(true, false, builders); + + TermSuggestionBuilder termSuggest = termSuggestion("alias").text("appple"); + + Suggest searchSuggest = searchSuggest("suggestion", termSuggest); + assertSuggestion(searchSuggest, 0, "suggestion", "apple"); + } + @Override protected Collection> nodePlugins() { return Collections.singleton(DummyTemplatePlugin.class); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java 
b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 3a7451e78fb..f9b252f0e13 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; @@ -38,10 +39,10 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.CompletionFieldMapper.CompletionFieldType; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -74,8 +75,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = 
defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -95,7 +95,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 7); } @@ -113,8 +113,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -129,7 +128,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -147,8 +146,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); 
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -163,7 +161,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -181,8 +179,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -197,7 +194,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -247,8 +244,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = 
defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -261,7 +257,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -279,8 +275,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() @@ -293,7 +288,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endObject()), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -345,8 +340,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); XContentBuilder builder = jsonBuilder() .startObject() 
.startArray("completion") @@ -362,7 +356,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject(); ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); - IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); + IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); } @@ -698,7 +692,7 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { } public void testUnknownQueryContextParsing() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -712,11 +706,10 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - CompletionFieldType completionFieldType = (CompletionFieldType) fieldMapper.fieldType(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + CompletionFieldType completionFieldType = (CompletionFieldType) mapperService.fullName("completion"); Exception e = expectThrows(IllegalArgumentException.class, () -> completionFieldType.getContextMappings().get("brand")); assertEquals("Unknown context name [brand], must be one of [ctx, type]", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java 
b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 37fdb7e0aa0..88e6ce64666 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -164,8 +164,9 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe } @Override - protected MappedFieldType mockFieldType() { + protected MappedFieldType mockFieldType(String fieldName) { CompletionFieldType completionFieldType = new CompletionFieldType(); + completionFieldType.setName(fieldName); completionFieldType.setContextMappings(new ContextMappings(contextMappings)); return completionFieldType; } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index 2d179f3dbe6..56ff157ec71 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -20,16 +20,14 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.search.suggest.completion.context.ContextBuilder; @@ -50,7 +48,7 @@ import static org.hamcrest.Matchers.isIn; public class GeoContextMappingTests extends ESSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -60,13 +58,12 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference - .bytes(jsonBuilder() + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -89,7 +86,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { } public void testIndexingWithSimpleContexts() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") 
.startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -100,13 +97,12 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endArray() .endObject() .endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference - .bytes(jsonBuilder() + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(jsonBuilder() .startObject() .startArray("completion") .startObject() @@ -127,7 +123,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { } public void testIndexingWithContextList() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -137,13 +133,12 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); - 
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference - .bytes(jsonBuilder() + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(jsonBuilder() .startObject() .startObject("completion") .array("input", "suggestion5", "suggestion6", "suggestion7") @@ -168,7 +163,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { } public void testIndexingWithMultipleContexts() throws Exception { - String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1") + XContentBuilder mapping = jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("completion") .field("type", "completion") .startArray("contexts") @@ -182,11 +177,10 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject().endObject() - .endObject().endObject()); + .endObject().endObject(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - MappedFieldType completionFieldType = fieldMapper.fieldType(); + MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MappedFieldType completionFieldType = mapperService.fullName("completion"); XContentBuilder builder = jsonBuilder() .startObject() .startArray("completion") @@ -200,8 +194,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), - 
XContentType.JSON)); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json b/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json index 72c9b54f6e3..abdc1192822 100644 --- a/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json +++ b/server/src/test/resources/org/elasticsearch/search/query/all-query-index.json @@ -11,6 +11,10 @@ "f1": {"type": "text"}, "f2": {"type": "keyword"}, "f3": {"type": "text"}, + "f3_alias": { + "type": "alias", + "path": "f3" + }, "f4": { "type": "text", "index_options": "docs" @@ -42,6 +46,10 @@ "format": "yyyy/MM/dd||epoch_millis" }, "f_bool": {"type": "boolean"}, + "f_bool_alias": { + "type": "alias", + "path": "f_bool" + }, "f_byte": {"type": "byte"}, "f_short": {"type": "short"}, "f_int": {"type": "integer"}, diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index e84f2a99a11..3002711bdbd 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -62,6 +62,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import 
org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; @@ -69,7 +70,6 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceSubPhase; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.junit.After; @@ -79,7 +79,12 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; import static org.mockito.Matchers.anyObject; @@ -144,16 +149,50 @@ public abstract class AggregatorTestCase extends ESTestCase { SearchLookup searchLookup = new SearchLookup(mapperService, ifds::getForField, new String[]{TYPE_NAME}); when(searchContext.lookup()).thenReturn(searchLookup); - QueryShardContext queryShardContext = queryShardContextMock(mapperService, fieldTypes, circuitBreakerService); + QueryShardContext queryShardContext = queryShardContextMock(mapperService); when(queryShardContext.getIndexSettings()).thenReturn(indexSettings); when(searchContext.getQueryShardContext()).thenReturn(queryShardContext); - for (MappedFieldType fieldType : fieldTypes) { - when(searchContext.smartNameFieldType(fieldType.name())).thenReturn(fieldType); - } + + Map fieldNameToType = new HashMap<>(); + fieldNameToType.putAll(Arrays.stream(fieldTypes) + .collect(Collectors.toMap(MappedFieldType::name, Function.identity()))); + fieldNameToType.putAll(getFieldAliases(fieldTypes)); + + 
registerFieldTypes(queryShardContext, searchContext, mapperService, + circuitBreakerService, fieldNameToType); return aggregationBuilder.build(searchContext, null); } + /** + * Allows subclasses to provide alternate names for the provided field type, which + * can be useful when testing aggregations on field aliases. + */ + protected Map getFieldAliases(MappedFieldType... fieldTypes) { + return Collections.emptyMap(); + } + + private void registerFieldTypes(QueryShardContext queryShardContext, + SearchContext searchContext, + MapperService mapperService, + CircuitBreakerService circuitBreakerService, + Map fieldNameToType) { + for (Map.Entry entry : fieldNameToType.entrySet()) { + String fieldName = entry.getKey(); + MappedFieldType fieldType = entry.getValue(); + + when(queryShardContext.fieldMapper(fieldName)).thenReturn(fieldType); + when(searchContext.smartNameFieldType(fieldName)).thenReturn(fieldType); + } + + for (MappedFieldType fieldType : new HashSet<>(fieldNameToType.values())) { + when(queryShardContext.getForField(fieldType)).then(invocation -> + fieldType.fielddataBuilder(mapperService.getIndexSettings().getIndex().getName()) + .build(mapperService.getIndexSettings(), fieldType, + new IndexFieldDataCache.None(), circuitBreakerService, mapperService)); + } + } + protected A createAggregator(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, MappedFieldType... 
fieldTypes) throws IOException { @@ -257,16 +296,9 @@ public abstract class AggregatorTestCase extends ESTestCase { /** * sub-tests that need a more complex mock can overwrite this */ - protected QueryShardContext queryShardContextMock(MapperService mapperService, MappedFieldType[] fieldTypes, - CircuitBreakerService circuitBreakerService) { + protected QueryShardContext queryShardContextMock(MapperService mapperService) { QueryShardContext queryShardContext = mock(QueryShardContext.class); when(queryShardContext.getMapperService()).thenReturn(mapperService); - for (MappedFieldType fieldType : fieldTypes) { - when(queryShardContext.fieldMapper(fieldType.name())).thenReturn(fieldType); - when(queryShardContext.getForField(fieldType)).then(invocation -> fieldType.fielddataBuilder(mapperService.getIndexSettings() - .getIndex().getName()) - .build(mapperService.getIndexSettings(), fieldType, new IndexFieldDataCache.None(), circuitBreakerService, mapperService)); - } NestedScope nestedScope = new NestedScope(); when(queryShardContext.isFilter()).thenCallRealMethod(); Mockito.doCallRealMethod().when(queryShardContext).setIsFilter(Matchers.anyBoolean()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index e50e93ec672..a891f30b93d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -83,7 +83,9 @@ import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Stream; @@ -94,21 +96,35 @@ import static java.util.stream.Collectors.toList; public abstract class AbstractBuilderTestCase 
extends ESTestCase { public static final String STRING_FIELD_NAME = "mapped_string"; + public static final String STRING_ALIAS_FIELD_NAME = "mapped_string_alias"; protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; + protected static final String INT_ALIAS_FIELD_NAME = "mapped_int_field_alias"; protected static final String INT_RANGE_FIELD_NAME = "mapped_int_range"; protected static final String DOUBLE_FIELD_NAME = "mapped_double"; protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; protected static final String DATE_FIELD_NAME = "mapped_date"; + protected static final String DATE_ALIAS_FIELD_NAME = "mapped_date_alias"; protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; + protected static final String GEO_POINT_ALIAS_FIELD_NAME = "mapped_geo_point_alias"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; - protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, - DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, - GEO_SHAPE_FIELD_NAME}; - protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, - DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, }; + protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, + INT_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, + DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, + GEO_SHAPE_FIELD_NAME}; + protected static final String[] MAPPED_LEAF_FIELD_NAMES = new 
String[]{STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, + INT_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, + DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME}; + + private static final Map ALIAS_TO_CONCRETE_FIELD_NAME = new HashMap<>(); + static { + ALIAS_TO_CONCRETE_FIELD_NAME.put(STRING_ALIAS_FIELD_NAME, STRING_FIELD_NAME); + ALIAS_TO_CONCRETE_FIELD_NAME.put(INT_ALIAS_FIELD_NAME, INT_FIELD_NAME); + ALIAS_TO_CONCRETE_FIELD_NAME.put(DATE_ALIAS_FIELD_NAME, DATE_FIELD_NAME); + ALIAS_TO_CONCRETE_FIELD_NAME.put(GEO_POINT_ALIAS_FIELD_NAME, GEO_POINT_FIELD_NAME); + } protected static Version indexVersionCreated; @@ -200,6 +216,13 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { .build(); } + protected static String expectedFieldName(String builderFieldName) { + if (currentTypes.length == 0) { + return builderFieldName; + } + return ALIAS_TO_CONCRETE_FIELD_NAME.getOrDefault(builderFieldName, builderFieldName); + } + @AfterClass public static void afterClass() throws Exception { IOUtils.close(serviceHolder); @@ -356,19 +379,24 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { } }); + for (String type : currentTypes) { mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type, - STRING_FIELD_NAME, "type=text", - STRING_FIELD_NAME_2, "type=keyword", - INT_FIELD_NAME, "type=integer", - INT_RANGE_FIELD_NAME, "type=integer_range", - DOUBLE_FIELD_NAME, "type=double", - BOOLEAN_FIELD_NAME, "type=boolean", - DATE_FIELD_NAME, "type=date", - DATE_RANGE_FIELD_NAME, "type=date_range", - OBJECT_FIELD_NAME, "type=object", - GEO_POINT_FIELD_NAME, "type=geo_point", - GEO_SHAPE_FIELD_NAME, "type=geo_shape" + STRING_FIELD_NAME, "type=text", + STRING_FIELD_NAME_2, "type=keyword", + STRING_ALIAS_FIELD_NAME, "type=alias,path=" + STRING_FIELD_NAME, + INT_FIELD_NAME, "type=integer", + INT_ALIAS_FIELD_NAME, "type=alias,path=" + 
INT_FIELD_NAME, + INT_RANGE_FIELD_NAME, "type=integer_range", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + DATE_ALIAS_FIELD_NAME, "type=alias,path=" + DATE_FIELD_NAME, + DATE_RANGE_FIELD_NAME, "type=date_range", + OBJECT_FIELD_NAME, "type=object", + GEO_POINT_FIELD_NAME, "type=geo_point", + GEO_POINT_ALIAS_FIELD_NAME, "type=alias,path=" + GEO_POINT_FIELD_NAME, + GEO_SHAPE_FIELD_NAME, "type=geo_shape" ))), MapperService.MergeReason.MAPPING_UPDATE); // also add mappings for two inner field in the object field mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index a2acc5371a1..c1efd9d8e6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -522,7 +522,7 @@ public abstract class AbstractQueryTestCase> */ protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException; - protected static void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) { + protected void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) { if (fieldBoost != AbstractQueryBuilder.DEFAULT_BOOST) { assertThat(query, instanceOf(BoostQuery.class)); BoostQuery boostQuery = (BoostQuery) query; @@ -532,10 +532,12 @@ public abstract class AbstractQueryTestCase> assertTermQuery(query, field, value); } - protected static void assertTermQuery(Query query, String field, String value) { + protected void assertTermQuery(Query query, String field, String value) { assertThat(query, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) query; - 
assertThat(termQuery.getTerm().field(), equalTo(field)); + + String expectedFieldName = expectedFieldName(field); + assertThat(termQuery.getTerm().field(), equalTo(expectedFieldName)); assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(value.toLowerCase(Locale.ROOT))); } @@ -625,6 +627,7 @@ public abstract class AbstractQueryTestCase> Object value; switch (fieldName) { case STRING_FIELD_NAME: + case STRING_ALIAS_FIELD_NAME: if (rarely()) { // unicode in 10% cases JsonStringEncoder encoder = JsonStringEncoder.getInstance(); @@ -783,4 +786,8 @@ public abstract class AbstractQueryTestCase> Rewriteable.rewriteAndFetch(builder, context, future); return future.actionGet(); } + + public boolean isTextField(String fieldName) { + return fieldName.equals(STRING_FIELD_NAME) || fieldName.equals(STRING_ALIAS_FIELD_NAME); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index d7d8c2ceb1b..dd41bc5a7fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -30,8 +30,10 @@ import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -161,10 +163,12 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { 
.build(); } - public void testQuery() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=text", "field2", "type=text", "field3", "type=text") - ); + public void testQuery() { + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", + "field1", "type=text", + "field2", "type=text", + "field3", "type=text", + "alias", "type=alias,path=field1")); client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3") .setRefreshPolicy(IMMEDIATE) .get(); @@ -299,6 +303,20 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(matchQuery("field3", "value3")) .get(); assertHitCount(response, 0); + + // user1 has access to field1, so a query on its field alias should match with the document: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("alias", "value1")) + .get(); + assertHitCount(response, 1); + // user2 has no access to field1, so a query on its field alias should not match with the document: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("alias", "value1")) + .get(); + assertHitCount(response, 0); } public void testGetApi() throws Exception { @@ -793,10 +811,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { } public void testFields() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=text,store=true", "field2", "type=text,store=true", - "field3", "type=text,store=true") - ); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", + "field1", "type=text,store=true", + "field2", "type=text,store=true", + "field3", 
"type=text,store=true", + "alias", "type=alias,path=field1")); client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3") .setRefreshPolicy(IMMEDIATE) .get(); @@ -888,6 +907,22 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + + // user1 is granted access to field1 only, and so should be able to load it by alias: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("alias") + .get(); + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getFields().get("alias").getValue(), equalTo("value1")); + + // user2 is not granted access to field1, and so should not be able to load it by alias: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("alias") + .get(); + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); } public void testSource() throws Exception { @@ -963,11 +998,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); } - public void testSort() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=long", "field2", "type=long") - ); - + public void testSort() { + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", + "field1", "type=long", + "field2", "type=long", + "alias", 
"type=alias,path=field1")); client().prepareIndex("test", "type1", "1").setSource("field1", 1d, "field2", 2d) .setRefreshPolicy(IMMEDIATE) .get(); @@ -1000,12 +1035,81 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .addSort("field2", SortOrder.ASC) .get(); assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(2L)); + + // user1 is granted to use field1, so it is included in the sort_values when using its alias: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addSort("alias", SortOrder.ASC) + .get(); + assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(1L)); + + // user2 is not granted to use field1, so the default missing sort value is included when using its alias: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addSort("alias", SortOrder.ASC) + .get(); + assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)); } - public void testAggs() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=text,fielddata=true", "field2", "type=text,fielddata=true") - ); + public void testHighlighting() { + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", + "field1", "type=text", + "field2", "type=text", + "field3", "type=text", + "alias", "type=alias,path=field1")); + client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3") + .setRefreshPolicy(IMMEDIATE) + .get(); + + // user1 has access to field1, so the highlight should be visible: + SearchResponse response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + 
.setQuery(matchQuery("field1", "value1")) + .highlighter(new HighlightBuilder().field("field1")) + .get(); + assertHitCount(response, 1); + SearchHit hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 1); + + // user2 has no access to field1, so the highlight should not be visible: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field2", "value2")) + .highlighter(new HighlightBuilder().field("field1")) + .get(); + assertHitCount(response, 1); + hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 0); + + // user1 has access to field1, so the highlight on its alias should be visible: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field1", "value1")) + .highlighter(new HighlightBuilder().field("alias")) + .get(); + assertHitCount(response, 1); + hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 1); + + // user2 has no access to field1, so the highlight on its alias should not be visible: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field2", "value2")) + .highlighter(new HighlightBuilder().field("alias")) + .get(); + assertHitCount(response, 1); + hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 0); + } + + public void testAggs() { + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", + "field1", "type=text,fielddata=true", + "field2", "type=text,fielddata=true", + "alias", "type=alias,path=field1")); client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", 
"value2") .setRefreshPolicy(IMMEDIATE) .get(); @@ -1038,6 +1142,21 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .addAggregation(AggregationBuilders.terms("_name").field("field2")) .get(); assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value2").getDocCount(), equalTo(1L)); + + // user1 is authorized to use field1, so buckets are include for a term agg on its alias: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("alias")) + .get(); + assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1").getDocCount(), equalTo(1L)); + + // user2 is not authorized to use field1, so no buckets are include for a term agg on its alias: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("alias")) + .get(); + assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1"), nullValue()); } public void testTVApi() throws Exception { @@ -1218,15 +1337,22 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { public void testParentChild() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() - .endObject() - .endObject() - .endObject(); + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .startObject("alias") + .field("type", "alias") + .field("path", "field1") + .endObject() + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .endObject() + .endObject() 
+ .endObject() + .endObject(); assertAcked(prepareCreate("test") .addMapping("doc", mapping)); ensureGreen(); @@ -1264,6 +1390,23 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)) .get(); assertHitCount(searchResponse, 0L); + + // Perform the same checks, but using an alias for field1. + searchResponse = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)) + .get(); + assertHitCount(searchResponse, 1L); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + + searchResponse = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)) + .get(); + assertHitCount(searchResponse, 0L); } public void testUpdateApiIsBlocked() throws Exception { @@ -1315,10 +1458,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value3")); } - public void testQuery_withRoleWithFieldWildcards() throws Exception { + public void testQuery_withRoleWithFieldWildcards() { assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=text", "field2", "type=text") - ); + .addMapping("type1", "field1", "type=text", "field2", "type=text")); client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2") .setRefreshPolicy(IMMEDIATE) .get(); @@ -1345,9 +1487,12 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { } public void testExistQuery() { - 
assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=text", "field2", "type=text", "field3", "type=text") - ); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", + "field1", "type=text", + "field2", "type=text", + "field3", "type=text", + "alias", "type=alias,path=field1")); + client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3") .setRefreshPolicy(IMMEDIATE) .get(); @@ -1402,6 +1547,20 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(existsQuery("field2")) .get(); assertHitCount(response, 0); + + // user1 has access to field1, so a query on its alias should match with the document: + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(existsQuery("alias")) + .get(); + assertHitCount(response, 1); + // user2 has no access to field1, so the query should not match with the document: + response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(existsQuery("alias")) + .get(); + assertHitCount(response, 0); } } From e20f59aa716f094296960afdc39f927a987481b5 Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Wed, 18 Jul 2018 10:26:26 -0700 Subject: [PATCH 089/260] [test] use randomized runner in packaging tests (#32109) Use the randomized runner from the test framework and add some basic logging to make the packaging tests behave more similarly to how we use junit in the rest of the project --- qa/vagrant/build.gradle | 18 +++++-- .../packaging/test/ArchiveTestCase.java | 16 ++---- .../packaging/test/PackagingTestCase.java | 51 +++++++++++++++++++ 3 files changed, 67 insertions(+), 18 deletions(-) create mode 100644 
qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 704136eb4cf..37190632b44 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -28,8 +28,8 @@ plugins { dependencies { compile "junit:junit:${versions.junit}" - compile "org.hamcrest:hamcrest-core:${versions.hamcrest}" - compile "org.hamcrest:hamcrest-library:${versions.hamcrest}" + compile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" @@ -81,7 +81,7 @@ tasks.dependencyLicenses.enabled = false tasks.dependenciesInfo.enabled = false tasks.thirdPartyAudit.excludes = [ - //commons-logging optional dependencies + // commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', @@ -89,7 +89,15 @@ tasks.thirdPartyAudit.excludes = [ 'org.apache.log4j.Level', 'org.apache.log4j.Logger', 'org.apache.log4j.Priority', - //commons-logging provided dependencies + // commons-logging provided dependencies 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener' + 'javax.servlet.ServletContextListener', + // from randomized testing + 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.DirectoryScanner', + 'org.apache.tools.ant.Task', + 'org.apache.tools.ant.types.FileSet', + 'org.easymock.EasyMock', + 'org.easymock.IArgumentMatcher', + 'org.jmock.core.Constraint' ] diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index df5e8cf995d..3aada7837d8 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ 
b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.packaging.test; +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; import org.apache.http.client.fluent.Request; import org.elasticsearch.packaging.util.Archives; import org.elasticsearch.packaging.util.Platforms; @@ -27,9 +28,6 @@ import org.elasticsearch.packaging.util.Shell; import org.elasticsearch.packaging.util.Shell.Result; import org.junit.Before; import org.junit.BeforeClass; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; import org.elasticsearch.packaging.util.Distribution; import org.elasticsearch.packaging.util.Installation; @@ -67,8 +65,8 @@ import static org.junit.Assume.assumeTrue; * Tests that apply to the archive distributions (tar, zip). To add a case for a distribution, subclass and * override {@link ArchiveTestCase#distribution()}. These tests should be the same across all archive distributions */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public abstract class ArchiveTestCase { +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class ArchiveTestCase extends PackagingTestCase { private static Installation installation; @@ -86,13 +84,11 @@ public abstract class ArchiveTestCase { assumeTrue("only compatible distributions", distribution().packaging.compatible); } - @Test public void test10Install() { installation = installArchive(distribution()); verifyArchiveInstallation(installation, distribution()); } - @Test public void test20PluginsListWithNoPlugins() { assumeThat(installation, is(notNullValue())); @@ -103,7 +99,6 @@ public abstract class ArchiveTestCase { assertThat(r.stdout, isEmptyString()); } - @Test public void test30AbortWhenJavaMissing() { assumeThat(installation, is(notNullValue())); @@ -146,7 +141,6 @@ public abstract class ArchiveTestCase { }); } - @Test public void test40CreateKeystoreManually() { 
assumeThat(installation, is(notNullValue())); @@ -180,7 +174,6 @@ public abstract class ArchiveTestCase { }); } - @Test public void test50StartAndStop() throws IOException { assumeThat(installation, is(notNullValue())); @@ -198,7 +191,6 @@ public abstract class ArchiveTestCase { Archives.stopElasticsearch(installation); } - @Test public void test60AutoCreateKeystore() { assumeThat(installation, is(notNullValue())); @@ -218,7 +210,6 @@ public abstract class ArchiveTestCase { }); } - @Test public void test70CustomPathConfAndJvmOptions() throws IOException { assumeThat(installation, is(notNullValue())); @@ -268,7 +259,6 @@ public abstract class ArchiveTestCase { } } - @Test public void test80RelativePathConf() throws IOException { assumeThat(installation, is(notNullValue())); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java new file mode 100644 index 00000000000..77644b70f28 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.JUnit3MethodProvider; +import com.carrotsearch.randomizedtesting.RandomizedRunner; +import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TestName; +import org.junit.runner.RunWith; + +@RunWith(RandomizedRunner.class) +@TestMethodProviders({ + JUnit3MethodProvider.class +}) +/** + * Class that all packaging test cases should inherit from. This makes working with the packaging tests more similar to what we're + * familiar with from {@link org.elasticsearch.test.ESTestCase} without having to apply its behavior that's not relevant here + */ +public abstract class PackagingTestCase { + + protected final Log logger = LogFactory.getLog(getClass()); + + @Rule + public final TestName testNameRule = new TestName(); + + @Before + public void logTestNameBefore() { + logger.info("[" + testNameRule.getMethodName() + "]: before test"); + } +} From 90fcb38448158d3a5d595307ee9f5c3d64b42e7f Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 18 Jul 2018 11:49:42 -0600 Subject: [PATCH 090/260] Adjust SSLDriver behavior for JDK11 changes (#32145) This is related to #32122. A number of things changed related to adding TLS 1.3 support in JDK11. Some exception messages and other SSLEngine behavior changed. This commit fixes assertions on exception messages. Additionally it identifies two bugs related to how the SSLDriver behaves in regards to JDK11 changes. Finally, it mutes a tests until correct behavior can be identified. There is another open issue for that muted test (#32144). 
--- .../security/transport/nio/SSLDriver.java | 19 ++++++-- .../transport/nio/SSLDriverTests.java | 43 +++++++++++++++---- 2 files changed, 49 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java index 382230684c7..fa7791689aa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java @@ -349,7 +349,10 @@ public class SSLDriver implements AutoCloseable { if (hasFlushPending() == false) { handshakeStatus = wrap(EMPTY_BUFFER_ARRAY).getHandshakeStatus(); } - continueHandshaking = false; + // If we need NEED_TASK we should run the tasks immediately + if (handshakeStatus != SSLEngineResult.HandshakeStatus.NEED_TASK) { + continueHandshaking = false; + } break; case NEED_TASK: runTasks(); @@ -432,8 +435,16 @@ public class SSLDriver implements AutoCloseable { } private void maybeFinishHandshake() { - // We only acknowledge that we are done handshaking if there are no bytes that need to be written - if (hasFlushPending() == false) { + if (engine.isOutboundDone() || engine.isInboundDone()) { + // If the engine is partially closed, immediate transition to close mode. + if (currentMode.isHandshake()) { + currentMode = new CloseMode(true); + } else { + String message = "Expected to be in handshaking mode. 
Instead in non-handshaking mode: " + currentMode; + throw new AssertionError(message); + } + } else if (hasFlushPending() == false) { + // We only acknowledge that we are done handshaking if there are no bytes that need to be written if (currentMode.isHandshake()) { currentMode = new ApplicationMode(); } else { @@ -510,7 +521,7 @@ public class SSLDriver implements AutoCloseable { if (isHandshaking && engine.isInboundDone() == false) { // If we attempt to close during a handshake either we are sending an alert and inbound // should already be closed or we are sending a close_notify. If we send a close_notify - // the peer will send an handshake error alert. If we attempt to receive the handshake alert, + // the peer might send an handshake error alert. If we attempt to receive the handshake alert, // the engine will throw an IllegalStateException as it is not in a proper state to receive // handshake message. Closing inbound immediately after close_notify is the cleanest option. needToReceiveClose = false; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java index e1e05032014..303ed92130a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java @@ -57,8 +57,15 @@ public class SSLDriverTests extends ESTestCase { public void testRenegotiate() throws Exception { SSLContext sslContext = getSSLContext(); - SSLDriver clientDriver = getDriver(sslContext.createSSLEngine(), true); - SSLDriver serverDriver = getDriver(sslContext.createSSLEngine(), false); + SSLEngine serverEngine = sslContext.createSSLEngine(); + SSLEngine clientEngine = sslContext.createSSLEngine(); + + String[] serverProtocols = {"TLSv1.2"}; + 
serverEngine.setEnabledProtocols(serverProtocols); + String[] clientProtocols = {"TLSv1.2"}; + clientEngine.setEnabledProtocols(clientProtocols); + SSLDriver clientDriver = getDriver(clientEngine, true); + SSLDriver serverDriver = getDriver(serverEngine, false); handshake(clientDriver, serverDriver); @@ -119,16 +126,27 @@ public class SSLDriverTests extends ESTestCase { SSLContext sslContext = getSSLContext(); SSLEngine clientEngine = sslContext.createSSLEngine(); SSLEngine serverEngine = sslContext.createSSLEngine(); - String[] serverProtocols = {"TLSv1.1", "TLSv1.2"}; + String[] serverProtocols = {"TLSv1.2"}; serverEngine.setEnabledProtocols(serverProtocols); - String[] clientProtocols = {"TLSv1"}; + String[] clientProtocols = {"TLSv1.1"}; clientEngine.setEnabledProtocols(clientProtocols); SSLDriver clientDriver = getDriver(clientEngine, true); SSLDriver serverDriver = getDriver(serverEngine, false); SSLException sslException = expectThrows(SSLException.class, () -> handshake(clientDriver, serverDriver)); - assertEquals("Client requested protocol TLSv1 not enabled or not supported", sslException.getMessage()); - failedCloseAlert(serverDriver, clientDriver); + String oldExpected = "Client requested protocol TLSv1.1 not enabled or not supported"; + String jdk11Expected = "Received fatal alert: protocol_version"; + boolean expectedMessage = oldExpected.equals(sslException.getMessage()) || jdk11Expected.equals(sslException.getMessage()); + assertTrue("Unexpected exception message: " + sslException.getMessage(), expectedMessage); + + // In JDK11 we need an non-application write + if (serverDriver.needsNonApplicationWrite()) { + serverDriver.nonApplicationWrite(); + } + // Prior to JDK11 we still need to send a close alert + if (serverDriver.isClosed() == false) { + failedCloseAlert(serverDriver, clientDriver); + } } public void testHandshakeFailureBecauseNoCiphers() throws Exception { @@ -144,11 +162,18 @@ public class SSLDriverTests extends ESTestCase { SSLDriver 
clientDriver = getDriver(clientEngine, true); SSLDriver serverDriver = getDriver(serverEngine, false); - SSLException sslException = expectThrows(SSLException.class, () -> handshake(clientDriver, serverDriver)); - assertEquals("no cipher suites in common", sslException.getMessage()); - failedCloseAlert(serverDriver, clientDriver); + expectThrows(SSLException.class, () -> handshake(clientDriver, serverDriver)); + // In JDK11 we need an non-application write + if (serverDriver.needsNonApplicationWrite()) { + serverDriver.nonApplicationWrite(); + } + // Prior to JDK11 we still need to send a close alert + if (serverDriver.isClosed() == false) { + failedCloseAlert(serverDriver, clientDriver); + } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32144") public void testCloseDuringHandshake() throws Exception { SSLContext sslContext = getSSLContext(); SSLDriver clientDriver = getDriver(sslContext.createSSLEngine(), true); From 63749498ea92dc03ac424f7f5764eed752eb567e Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Wed, 18 Jul 2018 13:28:25 -0500 Subject: [PATCH 091/260] Remove empty @return from JavaDoc --- .../xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index d35e7ab0b0c..d6b2e321d6c 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -318,8 +318,6 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase /** * Collects all the certificates that are normally trusted by 
the node ( contained in testnode.jks ) - * - * @return */ List getNodeTrustedCertificates() { Path testnodeCert = From 27d663b64b775355bb787355888132837f28db2e Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Wed, 18 Jul 2018 16:44:05 -0700 Subject: [PATCH 092/260] Revert "Introduce a Hashing Processor (#31087)" (#32178) This reverts commit 8c78fe711486a3f722879d8f7cf89632daf793e0. --- x-pack/plugin/build.gradle | 1 - .../xpack/security/Security.java | 10 +- .../xpack/security/ingest/HashProcessor.java | 200 ------------------ .../ingest/HashProcessorFactoryTests.java | 136 ------------ .../security/ingest/HashProcessorTests.java | 130 ------------ .../test/hash_processor/10_basic.yml | 51 ----- 6 files changed, 1 insertion(+), 527 deletions(-) delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 20ae41f10dc..ca529496bf1 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -151,7 +151,6 @@ integTestCluster { setting 'xpack.license.self_generated.type', 'trial' keystoreSetting 'bootstrap.password', 'x-pack-test-password' keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' - keystoreSetting 'xpack.security.ingest.hash.processor.key', 'hmackey' distribution = 'zip' // this is important since we use the reindex module in ML setupCommand 'setupTestUser', 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3115c08a946..98b5df3edc5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -175,7 +175,6 @@ import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; -import org.elasticsearch.xpack.security.ingest.HashProcessor; import org.elasticsearch.xpack.security.ingest.SetSecurityUserProcessor; import org.elasticsearch.xpack.security.rest.SecurityRestFilter; import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction; @@ -580,10 +579,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw // hide settings settingsList.add(Setting.listSetting(SecurityField.setting("hide_settings"), Collections.emptyList(), Function.identity(), Property.NodeScope, Property.Filtered)); - - // ingest processor settings - settingsList.add(HashProcessor.HMAC_KEY_SETTING); - return settingsList; } @@ -727,10 +722,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw @Override public Map getProcessors(Processor.Parameters parameters) { - Map processors = new HashMap<>(); - processors.put(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(parameters.threadContext)); - processors.put(HashProcessor.TYPE, new HashProcessor.Factory(parameters.env.settings())); - return processors; + return Collections.singletonMap(SetSecurityUserProcessor.TYPE, new SetSecurityUserProcessor.Factory(parameters.threadContext)); } diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java deleted file mode 100644 index fa49b843847..00000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/ingest/HashProcessor.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.ingest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.xpack.core.security.SecurityField; - -import javax.crypto.Mac; -import javax.crypto.SecretKeyFactory; -import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; -import java.security.InvalidKeyException; -import java.security.NoSuchAlgorithmException; -import java.security.spec.InvalidKeySpecException; -import java.util.Arrays; -import java.util.Base64; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; - -/** - * A 
processor that hashes the contents of a field (or fields) using various hashing algorithms - */ -public final class HashProcessor extends AbstractProcessor { - public static final String TYPE = "hash"; - public static final Setting.AffixSetting HMAC_KEY_SETTING = SecureSetting - .affixKeySetting(SecurityField.setting("ingest." + TYPE) + ".", "key", - (key) -> SecureSetting.secureString(key, null)); - - private final List fields; - private final String targetField; - private final Method method; - private final Mac mac; - private final byte[] salt; - private final boolean ignoreMissing; - - HashProcessor(String tag, List fields, String targetField, byte[] salt, Method method, @Nullable Mac mac, - boolean ignoreMissing) { - super(tag); - this.fields = fields; - this.targetField = targetField; - this.method = method; - this.mac = mac; - this.salt = salt; - this.ignoreMissing = ignoreMissing; - } - - List getFields() { - return fields; - } - - String getTargetField() { - return targetField; - } - - byte[] getSalt() { - return salt; - } - - @Override - public void execute(IngestDocument document) { - Map hashedFieldValues = fields.stream().map(f -> { - String value = document.getFieldValue(f, String.class, ignoreMissing); - if (value == null && ignoreMissing) { - return new Tuple(null, null); - } - try { - return new Tuple<>(f, method.hash(mac, salt, value)); - } catch (Exception e) { - throw new IllegalArgumentException("field[" + f + "] could not be hashed", e); - } - }).filter(tuple -> Objects.nonNull(tuple.v1())).collect(Collectors.toMap(Tuple::v1, Tuple::v2)); - if (fields.size() == 1) { - document.setFieldValue(targetField, hashedFieldValues.values().iterator().next()); - } else { - document.setFieldValue(targetField, hashedFieldValues); - } - } - - @Override - public String getType() { - return TYPE; - } - - public static final class Factory implements Processor.Factory { - - private final Settings settings; - private final Map secureKeys; - - public 
Factory(Settings settings) { - this.settings = settings; - this.secureKeys = new HashMap<>(); - HMAC_KEY_SETTING.getAllConcreteSettings(settings).forEach(k -> { - secureKeys.put(k.getKey(), k.get(settings)); - }); - } - - private static Mac createMac(Method method, SecureString password, byte[] salt, int iterations) { - try { - SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2With" + method.getAlgorithm()); - PBEKeySpec keySpec = new PBEKeySpec(password.getChars(), salt, iterations, 128); - byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded(); - Mac mac = Mac.getInstance(method.getAlgorithm()); - mac.init(new SecretKeySpec(pbkdf2, method.getAlgorithm())); - return mac; - } catch (NoSuchAlgorithmException | InvalidKeySpecException | InvalidKeyException e) { - throw new IllegalArgumentException("invalid settings", e); - } - } - - @Override - public HashProcessor create(Map registry, String processorTag, Map config) { - boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - List fields = ConfigurationUtils.readList(TYPE, processorTag, config, "fields"); - if (fields.isEmpty()) { - throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "fields", "must specify at least one field"); - } else if (fields.stream().anyMatch(Strings::isNullOrEmpty)) { - throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "fields", - "a field-name entry is either empty or null"); - } - String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); - String keySettingName = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "key_setting"); - SecureString key = secureKeys.get(keySettingName); - if (key == null) { - throw ConfigurationUtils.newConfigurationException(TYPE, processorTag, "key_setting", - "key [" + keySettingName + "] must match [xpack.security.ingest.hash.*.key]. 
It is not set"); - } - String saltString = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "salt"); - byte[] salt = saltString.getBytes(StandardCharsets.UTF_8); - String methodProperty = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "method", "SHA256"); - Method method = Method.fromString(processorTag, "method", methodProperty); - int iterations = ConfigurationUtils.readIntProperty(TYPE, processorTag, config, "iterations", 5); - Mac mac = createMac(method, key, salt, iterations); - return new HashProcessor(processorTag, fields, targetField, salt, method, mac, ignoreMissing); - } - } - - enum Method { - SHA1("HmacSHA1"), - SHA256("HmacSHA256"), - SHA384("HmacSHA384"), - SHA512("HmacSHA512"); - - private final String algorithm; - - Method(String algorithm) { - this.algorithm = algorithm; - } - - public String getAlgorithm() { - return algorithm; - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - - public String hash(Mac mac, byte[] salt, String input) { - try { - byte[] encrypted = mac.doFinal(input.getBytes(StandardCharsets.UTF_8)); - byte[] messageWithSalt = new byte[salt.length + encrypted.length]; - System.arraycopy(salt, 0, messageWithSalt, 0, salt.length); - System.arraycopy(encrypted, 0, messageWithSalt, salt.length, encrypted.length); - return Base64.getEncoder().encodeToString(messageWithSalt); - } catch (IllegalStateException e) { - throw new ElasticsearchException("error hashing data", e); - } - } - - public static Method fromString(String processorTag, String propertyName, String type) { - try { - return Method.valueOf(type.toUpperCase(Locale.ROOT)); - } catch(IllegalArgumentException e) { - throw newConfigurationException(TYPE, processorTag, propertyName, "type [" + type + - "] not supported, cannot convert field. 
Valid hash methods: " + Arrays.toString(Method.values())); - } - } - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java deleted file mode 100644 index e9dda488e72..00000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorFactoryTests.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.ingest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.settings.MockSecureSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; - -public class HashProcessorFactoryTests extends ESTestCase { - - public void testProcessor() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("target_field", "_target"); - config.put("salt", "_salt"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - for (HashProcessor.Method method : HashProcessor.Method.values()) { - config.put("method", method.toString()); - HashProcessor processor = factory.create(null, 
"_tag", new HashMap<>(config)); - assertThat(processor.getFields(), equalTo(Collections.singletonList("_field"))); - assertThat(processor.getTargetField(), equalTo("_target")); - assertArrayEquals(processor.getSalt(), "_salt".getBytes(StandardCharsets.UTF_8)); - } - } - - public void testProcessorNoFields() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("target_field", "_target"); - config.put("salt", "_salt"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[fields] required property is missing")); - } - - public void testProcessorNoTargetField() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("salt", "_salt"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); - } - - public void testProcessorFieldsIsEmpty() { - MockSecureSettings mockSecureSettings = new 
MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList(randomBoolean() ? "" : null)); - config.put("salt", "_salt"); - config.put("target_field", "_target"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[fields] a field-name entry is either empty or null")); - } - - public void testProcessorMissingSalt() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("target_field", "_target"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[salt] required property is missing")); - } - - public void testProcessorInvalidMethod() { - MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.security.ingest.hash.processor.key", "my_key"); - Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", 
Collections.singletonList("_field")); - config.put("salt", "_salt"); - config.put("target_field", "_target"); - config.put("key_setting", "xpack.security.ingest.hash.processor.key"); - config.put("method", "invalid"); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(e.getMessage(), equalTo("[method] type [invalid] not supported, cannot convert field. " + - "Valid hash methods: [sha1, sha256, sha384, sha512]")); - } - - public void testProcessorInvalidOrMissingKeySetting() { - Settings settings = Settings.builder().setSecureSettings(new MockSecureSettings()).build(); - HashProcessor.Factory factory = new HashProcessor.Factory(settings); - Map config = new HashMap<>(); - config.put("fields", Collections.singletonList("_field")); - config.put("salt", "_salt"); - config.put("target_field", "_target"); - config.put("key_setting", "invalid"); - config.put("method", HashProcessor.Method.SHA1.toString()); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", new HashMap<>(config))); - assertThat(e.getMessage(), - equalTo("[key_setting] key [invalid] must match [xpack.security.ingest.hash.*.key]. It is not set")); - config.remove("key_setting"); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, - () -> factory.create(null, "_tag", config)); - assertThat(ex.getMessage(), equalTo("[key_setting] required property is missing")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java deleted file mode 100644 index b3890600592..00000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ingest/HashProcessorTests.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.ingest; - -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.ingest.HashProcessor.Method; - -import javax.crypto.Mac; -import javax.crypto.SecretKeyFactory; -import javax.crypto.spec.PBEKeySpec; -import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Base64; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; - -public class HashProcessorTests extends ESTestCase { - - @SuppressWarnings("unchecked") - public void testIgnoreMissing() throws Exception { - Method method = randomFrom(Method.values()); - Mac mac = createMac(method); - Map fields = new HashMap<>(); - fields.put("one", "foo"); - HashProcessor processor = new HashProcessor("_tag", Arrays.asList("one", "two"), - "target", "_salt".getBytes(StandardCharsets.UTF_8), Method.SHA1, mac, true); - IngestDocument ingestDocument = new IngestDocument(fields, new HashMap<>()); - processor.execute(ingestDocument); - Map target = ingestDocument.getFieldValue("target", Map.class); - assertThat(target.size(), equalTo(1)); - assertNotNull(target.get("one")); - - HashProcessor failProcessor = new HashProcessor("_tag", Arrays.asList("one", "two"), - "target", "_salt".getBytes(StandardCharsets.UTF_8), Method.SHA1, mac, false); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> failProcessor.execute(ingestDocument)); - assertThat(exception.getMessage(), equalTo("field [two] not present as part of path [two]")); - } - - public void testStaticKeyAndSalt() throws Exception { - byte[] salt = 
"_salt".getBytes(StandardCharsets.UTF_8); - SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1"); - PBEKeySpec keySpec = new PBEKeySpec("hmackey".toCharArray(), salt, 5, 128); - byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded(); - Mac mac = Mac.getInstance(Method.SHA1.getAlgorithm()); - mac.init(new SecretKeySpec(pbkdf2, Method.SHA1.getAlgorithm())); - Map fields = new HashMap<>(); - fields.put("field", "0123456789"); - HashProcessor processor = new HashProcessor("_tag", Collections.singletonList("field"), - "target", salt, Method.SHA1, mac, false); - IngestDocument ingestDocument = new IngestDocument(fields, new HashMap<>()); - processor.execute(ingestDocument); - assertThat(ingestDocument.getFieldValue("target", String.class), equalTo("X3NhbHQMW0oHJGEEE9obGcGv5tGd7HFyDw==")); - } - - public void testProcessorSingleField() throws Exception { - List fields = Collections.singletonList(randomAlphaOfLength(6)); - Map docFields = new HashMap<>(); - for (String field : fields) { - docFields.put(field, randomAlphaOfLengthBetween(2, 10)); - } - - String targetField = randomAlphaOfLength(6); - Method method = randomFrom(Method.values()); - Mac mac = createMac(method); - byte[] salt = randomByteArrayOfLength(5); - HashProcessor processor = new HashProcessor("_tag", fields, targetField, salt, method, mac, false); - IngestDocument ingestDocument = new IngestDocument(docFields, new HashMap<>()); - processor.execute(ingestDocument); - - String targetFieldValue = ingestDocument.getFieldValue(targetField, String.class); - Object expectedTargetFieldValue = method.hash(mac, salt, ingestDocument.getFieldValue(fields.get(0), String.class)); - assertThat(targetFieldValue, equalTo(expectedTargetFieldValue)); - byte[] bytes = Base64.getDecoder().decode(targetFieldValue); - byte[] actualSaltPrefix = new byte[salt.length]; - System.arraycopy(bytes, 0, actualSaltPrefix, 0, salt.length); - assertArrayEquals(salt, actualSaltPrefix); - } - 
- @SuppressWarnings("unchecked") - public void testProcessorMultipleFields() throws Exception { - List fields = new ArrayList<>(); - for (int i = 0; i < randomIntBetween(2, 10); i++) { - fields.add(randomAlphaOfLength(5 + i)); - } - Map docFields = new HashMap<>(); - for (String field : fields) { - docFields.put(field, randomAlphaOfLengthBetween(2, 10)); - } - - String targetField = randomAlphaOfLength(6); - Method method = randomFrom(Method.values()); - Mac mac = createMac(method); - byte[] salt = randomByteArrayOfLength(5); - HashProcessor processor = new HashProcessor("_tag", fields, targetField, salt, method, mac, false); - IngestDocument ingestDocument = new IngestDocument(docFields, new HashMap<>()); - processor.execute(ingestDocument); - - Map targetFieldMap = ingestDocument.getFieldValue(targetField, Map.class); - for (Map.Entry entry : targetFieldMap.entrySet()) { - Object expectedTargetFieldValue = method.hash(mac, salt, ingestDocument.getFieldValue(entry.getKey(), String.class)); - assertThat(entry.getValue(), equalTo(expectedTargetFieldValue)); - byte[] bytes = Base64.getDecoder().decode(entry.getValue()); - byte[] actualSaltPrefix = new byte[salt.length]; - System.arraycopy(bytes, 0, actualSaltPrefix, 0, salt.length); - assertArrayEquals(salt, actualSaltPrefix); - } - } - - private Mac createMac(Method method) throws Exception { - char[] password = randomAlphaOfLengthBetween(1, 10).toCharArray(); - byte[] salt = randomAlphaOfLength(5).getBytes(StandardCharsets.UTF_8); - int iterations = randomIntBetween(1, 10); - SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2With" + method.getAlgorithm()); - PBEKeySpec keySpec = new PBEKeySpec(password, salt, iterations, 128); - byte[] pbkdf2 = secretKeyFactory.generateSecret(keySpec).getEncoded(); - Mac mac = Mac.getInstance(method.getAlgorithm()); - mac.init(new SecretKeySpec(pbkdf2, method.getAlgorithm())); - return mac; - } -} diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml deleted file mode 100644 index ee84e02d2f4..00000000000 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/hash_processor/10_basic.yml +++ /dev/null @@ -1,51 +0,0 @@ ---- -teardown: - - do: - ingest.delete_pipeline: - id: "my_pipeline" - ignore: 404 - ---- -"Test Hash Processor": - - - do: - cluster.health: - wait_for_status: yellow - - do: - ingest.put_pipeline: - id: "my_pipeline" - body: > - { - "processors": [ - { - "hash" : { - "fields" : ["user_ssid"], - "target_field" : "anonymized", - "salt": "_salt", - "iterations": 5, - "method": "sha1", - "key_setting": "xpack.security.ingest.hash.processor.key" - } - } - ] - } - - match: { acknowledged: true } - - - do: - index: - index: test - type: test - id: 1 - pipeline: "my_pipeline" - body: > - { - "user_ssid": "0123456789" - } - - - do: - get: - index: test - type: test - id: 1 - - match: { _source.anonymized: "X3NhbHQMW0oHJGEEE9obGcGv5tGd7HFyDw==" } - From aae0133847254ce38dd2dc3ee01a10b486589243 Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Wed, 18 Jul 2018 17:18:00 -0700 Subject: [PATCH 093/260] [test] port linux package packaging tests (#31943) Add packaging tests for the linux package distributions to the java test project and remove them from bats. Most of the tests that lived in 30_deb_package.bats and 40_rpm_package.bats are applicable to both package types and are combined into a single type of test case. 
Others are separated out into separate cases to make their intent more clear For #26741 --- qa/vagrant/build.gradle | 13 +- .../packaging/PackagingTests.java | 22 +- .../test/DebPreservationTestCase.java | 127 +++++++++ .../packaging/test/DefaultDebBasicTests.java | 31 +++ .../test/DefaultDebPreservationTests.java | 30 ++ .../packaging/test/DefaultRpmBasicTests.java | 30 ++ .../test/DefaultRpmPreservationTests.java | 30 ++ .../packaging/test/OssDebBasicTests.java | 30 ++ .../test/OssDebPreservationTests.java | 30 ++ .../packaging/test/OssRpmBasicTests.java | 30 ++ .../test/OssRpmPreservationTests.java | 30 ++ .../test/PackageDependenciesTests.java | 73 +++++ .../packaging/test/PackageTestCase.java | 168 ++++++++++++ .../test/RpmPreservationTestCase.java | 141 ++++++++++ .../packaging/util/Archives.java | 11 +- .../elasticsearch/packaging/util/Cleanup.java | 15 +- .../packaging/util/FileMatcher.java | 1 + .../packaging/util/FileUtils.java | 19 ++ .../packaging/util/Installation.java | 34 ++- .../packaging/util/Packages.java | 259 ++++++++++++++++++ .../packaging/util/Platforms.java | 38 ++- .../packaging/util/ServerUtils.java | 2 +- .../packaging/tests/30_deb_package.bats | 233 ---------------- .../packaging/tests/40_rpm_package.bats | 220 --------------- 24 files changed, 1114 insertions(+), 503 deletions(-) create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java create mode 100644 
qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java create mode 100644 qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java delete mode 100644 qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats delete mode 100644 qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 37190632b44..4a0c9146962 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -28,7 +28,8 @@ plugins { dependencies { compile "junit:junit:${versions.junit}" - compile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + compile "org.hamcrest:hamcrest-core:${versions.hamcrest}" + compile "org.hamcrest:hamcrest-library:${versions.hamcrest}" compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" @@ -91,13 +92,5 @@ tasks.thirdPartyAudit.excludes = [ 'org.apache.log4j.Priority', // commons-logging provided dependencies 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener', - // from randomized testing - 'org.apache.tools.ant.BuildException', - 'org.apache.tools.ant.DirectoryScanner', - 'org.apache.tools.ant.Task', - 'org.apache.tools.ant.types.FileSet', - 'org.easymock.EasyMock', - 'org.easymock.IArgumentMatcher', - 'org.jmock.core.Constraint' + 'javax.servlet.ServletContextListener' ] diff --git 
a/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java index fa7f8e8ef78..57b647e1207 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/PackagingTests.java @@ -19,10 +19,19 @@ package org.elasticsearch.packaging; +import org.elasticsearch.packaging.test.DefaultDebPreservationTests; +import org.elasticsearch.packaging.test.DefaultDebBasicTests; +import org.elasticsearch.packaging.test.DefaultRpmPreservationTests; +import org.elasticsearch.packaging.test.DefaultRpmBasicTests; +import org.elasticsearch.packaging.test.OssDebPreservationTests; +import org.elasticsearch.packaging.test.OssDebBasicTests; +import org.elasticsearch.packaging.test.OssRpmPreservationTests; +import org.elasticsearch.packaging.test.OssRpmBasicTests; import org.elasticsearch.packaging.test.OssTarTests; import org.elasticsearch.packaging.test.OssZipTests; import org.elasticsearch.packaging.test.DefaultTarTests; import org.elasticsearch.packaging.test.DefaultZipTests; +import org.elasticsearch.packaging.test.PackageDependenciesTests; import org.junit.runner.RunWith; import org.junit.runners.Suite; @@ -31,8 +40,17 @@ import org.junit.runners.Suite.SuiteClasses; @RunWith(Suite.class) @SuiteClasses({ DefaultTarTests.class, - DefaultZipTests.class, OssTarTests.class, - OssZipTests.class + DefaultZipTests.class, + OssZipTests.class, + PackageDependenciesTests.class, + DefaultRpmBasicTests.class, + OssRpmBasicTests.class, + DefaultDebBasicTests.class, + OssDebBasicTests.class, + DefaultDebPreservationTests.class, + OssDebPreservationTests.class, + DefaultRpmPreservationTests.class, + OssRpmPreservationTests.class }) public class PackagingTests {} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java 
b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java new file mode 100644 index 00000000000..c584f5d2e44 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DebPreservationTestCase.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.Installation; +import org.elasticsearch.packaging.util.Shell; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.nio.file.Files; +import java.nio.file.Paths; + +import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsExist; +import static org.elasticsearch.packaging.util.Packages.SYSVINIT_SCRIPT; +import static org.elasticsearch.packaging.util.Packages.assertInstalled; +import static org.elasticsearch.packaging.util.Packages.assertRemoved; +import static org.elasticsearch.packaging.util.Packages.install; +import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.packageStatus; +import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; +import static org.elasticsearch.packaging.util.Platforms.isDPKG; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class DebPreservationTestCase extends PackagingTestCase { + + private static Installation installation; + + protected abstract Distribution distribution(); + + @BeforeClass + public static void cleanup() { + installation = null; + cleanEverything(); + } + + @Before + public void onlyCompatibleDistributions() { + assumeTrue("only dpkg platforms", isDPKG()); + assumeTrue("only compatible distributions", 
distribution().packaging.compatible); + } + + public void test10Install() { + assertRemoved(distribution()); + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + } + + public void test20Remove() { + assumeThat(installation, is(notNullValue())); + + remove(distribution()); + + // some config files were not removed + + assertPathsExist( + installation.config, + installation.config("elasticsearch.yml"), + installation.config("jvm.options"), + installation.config("log4j2.properties") + ); + + // keystore was removed + + assertPathsDontExist( + installation.config("elasticsearch.keystore"), + installation.config(".elasticsearch.keystore.initial_md5sum") + ); + + // doc files were removed + + assertPathsDontExist( + Paths.get("/usr/share/doc/" + distribution().flavor.name), + Paths.get("/usr/share/doc/" + distribution().flavor.name + "/copyright") + ); + + // sysvinit service file was not removed + assertTrue(Files.exists(SYSVINIT_SCRIPT)); + + // defaults file was not removed + assertTrue(Files.exists(installation.envFile)); + } + + public void test30Purge() { + assumeThat(installation, is(notNullValue())); + + final Shell sh = new Shell(); + sh.run("dpkg --purge " + distribution().flavor.name); + + assertRemoved(distribution()); + + assertPathsDontExist( + installation.config, + installation.envFile, + SYSVINIT_SCRIPT + ); + + assertThat(packageStatus(distribution()).exitCode, is(1)); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java new file mode 100644 index 00000000000..cd40c0e9e81 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebBasicTests.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultDebBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_DEB; + } + +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java new file mode 100644 index 00000000000..d8b8c7f562b --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultDebPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultDebPreservationTests extends DebPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_DEB; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java new file mode 100644 index 00000000000..a8ce7b48685 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmBasicTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultRpmBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java new file mode 100644 index 00000000000..633492cce6c --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/DefaultRpmPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class DefaultRpmPreservationTests extends RpmPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.DEFAULT_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java new file mode 100644 index 00000000000..5d779ac4653 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebBasicTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssDebBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_DEB; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java new file mode 100644 index 00000000000..cfce73bb160 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssDebPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssDebPreservationTests extends DebPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_DEB; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java new file mode 100644 index 00000000000..d3320c07955 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmBasicTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssRpmBasicTests extends PackageTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java new file mode 100644 index 00000000000..87071d687d0 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/OssRpmPreservationTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Distribution; + +public class OssRpmPreservationTests extends RpmPreservationTestCase { + + @Override + protected Distribution distribution() { + return Distribution.OSS_RPM; + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java new file mode 100644 index 00000000000..6861ef0a3ff --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageDependenciesTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.Platforms; +import org.elasticsearch.packaging.util.Shell; +import org.elasticsearch.packaging.util.Shell.Result; + +import java.util.regex.Pattern; + +import static junit.framework.TestCase.assertTrue; +import static org.elasticsearch.packaging.util.Distribution.DEFAULT_DEB; +import static org.elasticsearch.packaging.util.Distribution.DEFAULT_RPM; +import static org.elasticsearch.packaging.util.Distribution.OSS_DEB; +import static org.elasticsearch.packaging.util.Distribution.OSS_RPM; +import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; +import static org.junit.Assume.assumeTrue; + +/** + * Tests that linux packages correctly declare their dependencies and their conflicts + */ +public class PackageDependenciesTests extends PackagingTestCase { + + public void testDebDependencies() { + assumeTrue(Platforms.isDPKG()); + + final Shell sh = new Shell(); + + final Result defaultResult = sh.run("dpkg -I " + getDistributionFile(DEFAULT_DEB)); + final Result ossResult = sh.run("dpkg -I " + getDistributionFile(OSS_DEB)); + + assertTrue(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(defaultResult.stdout).find()); + assertTrue(Pattern.compile("(?m)^ Depends:.*bash.*").matcher(ossResult.stdout).find()); + + assertTrue(Pattern.compile("(?m)^ Conflicts: elasticsearch-oss$").matcher(defaultResult.stdout).find()); + assertTrue(Pattern.compile("(?m)^ Conflicts: elasticsearch$").matcher(ossResult.stdout).find()); + } + + public void testRpmDependencies() { + assumeTrue(Platforms.isRPM()); + + final Shell sh = new Shell(); + + final Result defaultDeps = sh.run("rpm -qpR " + getDistributionFile(DEFAULT_RPM)); + final Result ossDeps = sh.run("rpm -qpR " + getDistributionFile(OSS_RPM)); + + assertTrue(Pattern.compile("(?m)^/bin/bash\\s*$").matcher(defaultDeps.stdout).find()); + assertTrue(Pattern.compile("(?m)^/bin/bash\\s*$").matcher(ossDeps.stdout).find()); + + 
final Result defaultConflicts = sh.run("rpm -qp --conflicts " + getDistributionFile(DEFAULT_RPM)); + final Result ossConflicts = sh.run("rpm -qp --conflicts " + getDistributionFile(OSS_RPM)); + + assertTrue(Pattern.compile("(?m)^elasticsearch-oss\\s*$").matcher(defaultConflicts.stdout).find()); + assertTrue(Pattern.compile("(?m)^elasticsearch\\s*$").matcher(ossConflicts.stdout).find()); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java new file mode 100644 index 00000000000..28a767e95ae --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.Installation; +import org.elasticsearch.packaging.util.Shell; + +import org.elasticsearch.packaging.util.Shell.Result; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.nio.file.Files; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE; +import static org.elasticsearch.packaging.util.Packages.assertInstalled; +import static org.elasticsearch.packaging.util.Packages.assertRemoved; +import static org.elasticsearch.packaging.util.Packages.install; +import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.startElasticsearch; +import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; +import static org.elasticsearch.packaging.util.Platforms.getOsRelease; +import static org.elasticsearch.packaging.util.Platforms.isSystemd; +import static org.elasticsearch.packaging.util.ServerUtils.runElasticsearchTests; + +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.isEmptyString; + +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class PackageTestCase extends PackagingTestCase { + + private static 
Installation installation; + + protected abstract Distribution distribution(); + + @BeforeClass + public static void cleanup() { + installation = null; + cleanEverything(); + } + + @Before + public void onlyCompatibleDistributions() { + assumeTrue("only compatible distributions", distribution().packaging.compatible); + } + + public void test10InstallPackage() { + assertRemoved(distribution()); + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + } + + public void test20PluginsCommandWhenNoPlugins() { + assumeThat(installation, is(notNullValue())); + + final Shell sh = new Shell(); + assertThat(sh.run(installation.bin("elasticsearch-plugin") + " list").stdout, isEmptyString()); + } + + public void test30InstallDoesNotStartServer() { + assumeThat(installation, is(notNullValue())); + + final Shell sh = new Shell(); + assertThat(sh.run("ps aux").stdout, not(containsString("org.elasticsearch.bootstrap.Elasticsearch"))); + } + + public void test40StartServer() throws IOException { + assumeThat(installation, is(notNullValue())); + + startElasticsearch(); + runElasticsearchTests(); + verifyPackageInstallation(installation, distribution()); // check startup script didn't change permissions + } + + public void test50Remove() { + assumeThat(installation, is(notNullValue())); + + remove(distribution()); + + // removing must stop the service + final Shell sh = new Shell(); + assertThat(sh.run("ps aux").stdout, not(containsString("org.elasticsearch.bootstrap.Elasticsearch"))); + + if (isSystemd()) { + + final int statusExitCode; + + // Before version 231 systemctl returned exit code 3 for both services that were stopped, and nonexistent + // services [1]. In version 231 and later it returns exit code 4 for non-existent services. + // + // The exception is Centos 7 and oel 7 where it returns exit code 4 for non-existent services from a systemd reporting a version + // earlier than 231. 
Centos 6 does not have an /etc/os-release, but that's fine because it also doesn't use systemd. + // + // [1] https://github.com/systemd/systemd/pull/3385 + if (getOsRelease().contains("ID=\"centos\"") || getOsRelease().contains("ID=\"ol\"")) { + statusExitCode = 4; + } else { + + final Result versionResult = sh.run("systemctl --version"); + final Matcher matcher = Pattern.compile("^systemd (\\d+)\n").matcher(versionResult.stdout); + matcher.find(); + final int version = Integer.parseInt(matcher.group(1)); + + statusExitCode = version < 231 + ? 3 + : 4; + } + + assertThat(sh.runIgnoreExitCode("systemctl status elasticsearch.service").exitCode, is(statusExitCode)); + assertThat(sh.runIgnoreExitCode("systemctl is-enabled elasticsearch.service").exitCode, is(1)); + + } + + assertPathsDontExist( + installation.bin, + installation.lib, + installation.modules, + installation.plugins, + installation.logs, + installation.pidDir + ); + + assertFalse(Files.exists(SYSTEMD_SERVICE)); + } + + public void test60Reinstall() { + assumeThat(installation, is(notNullValue())); + + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + + remove(distribution()); + assertRemoved(distribution()); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java new file mode 100644 index 00000000000..527c1d2cc13 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/RpmPreservationTestCase.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.packaging.test; + +import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering; +import org.elasticsearch.packaging.util.Distribution; +import org.elasticsearch.packaging.util.Installation; +import org.elasticsearch.packaging.util.Shell; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.stream.Stream; + +import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; +import static org.elasticsearch.packaging.util.FileUtils.append; +import static org.elasticsearch.packaging.util.FileUtils.assertPathsDontExist; +import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE; +import static org.elasticsearch.packaging.util.Packages.SYSVINIT_SCRIPT; +import static org.elasticsearch.packaging.util.Packages.assertInstalled; +import static org.elasticsearch.packaging.util.Packages.assertRemoved; +import static org.elasticsearch.packaging.util.Packages.install; +import static org.elasticsearch.packaging.util.Packages.remove; +import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; +import static org.elasticsearch.packaging.util.Platforms.isRPM; +import static org.elasticsearch.packaging.util.Platforms.isSystemd; +import static org.hamcrest.CoreMatchers.notNullValue; +import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +@TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) +public abstract class RpmPreservationTestCase extends PackagingTestCase { + + private static Installation installation; + + protected abstract Distribution distribution(); + + @BeforeClass + public static void cleanup() { + installation = null; + cleanEverything(); + } + + @Before + public void onlyCompatibleDistributions() { + assumeTrue("only rpm platforms", isRPM()); + assumeTrue("only compatible distributions", distribution().packaging.compatible); + } + + public void test10Install() { + assertRemoved(distribution()); + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + } + + public void test20Remove() { + assumeThat(installation, is(notNullValue())); + + remove(distribution()); + + // config was removed + assertFalse(Files.exists(installation.config)); + + // sysvinit service file was removed + assertFalse(Files.exists(SYSVINIT_SCRIPT)); + + // defaults file was removed + assertFalse(Files.exists(installation.envFile)); + } + + public void test30PreserveConfig() { + final Shell sh = new Shell(); + + installation = install(distribution()); + assertInstalled(distribution()); + verifyPackageInstallation(installation, distribution()); + + sh.run("echo foobar | " + installation.executables().elasticsearchKeystore + " add --stdin foo.bar"); + Stream.of( + installation.config("elasticsearch.yml"), + installation.config("jvm.options"), + installation.config("log4j2.properties") + ).forEach(path -> append(path, "# foo")); + + remove(distribution()); + assertRemoved(distribution()); + + if (isSystemd()) { + assertThat(sh.runIgnoreExitCode("systemctl is-enabled 
elasticsearch.service").exitCode, is(1)); + } + + assertPathsDontExist( + installation.bin, + installation.lib, + installation.modules, + installation.plugins, + installation.logs, + installation.pidDir, + installation.envFile, + SYSVINIT_SCRIPT, + SYSTEMD_SERVICE + ); + + assertTrue(Files.exists(installation.config)); + assertTrue(Files.exists(installation.config("elasticsearch.keystore"))); + + Stream.of( + "elasticsearch.yml", + "jvm.options", + "log4j2.properties" + ).forEach(configFile -> { + final Path original = installation.config(configFile); + final Path saved = installation.config(configFile + ".rpmsave"); + assertFalse(original + " should not exist", Files.exists(original)); + assertTrue(saved + " should exist", Files.exists(saved)); + }); + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index 6ffec813eb0..9e9a453ca84 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -35,7 +35,7 @@ import static org.elasticsearch.packaging.util.FileMatcher.p660; import static org.elasticsearch.packaging.util.FileMatcher.p755; import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion; import static org.elasticsearch.packaging.util.FileUtils.getDefaultArchiveInstallPath; -import static org.elasticsearch.packaging.util.FileUtils.getPackagingArchivesDir; +import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.elasticsearch.packaging.util.FileUtils.lsGlob; import static org.elasticsearch.packaging.util.FileUtils.mv; @@ -66,7 +66,7 @@ public class Archives { public static Installation installArchive(Distribution distribution, Path fullInstallPath, String version) { final Shell sh = new Shell(); - final Path distributionFile = 
getPackagingArchivesDir().resolve(distribution.filename(version)); + final Path distributionFile = getDistributionFile(distribution); final Path baseInstallPath = fullInstallPath.getParent(); final Path extractedPath = baseInstallPath.resolve("elasticsearch-" + version); @@ -106,7 +106,7 @@ public class Archives { Platforms.onLinux(() -> setupArchiveUsersLinux(fullInstallPath)); Platforms.onWindows(() -> setupArchiveUsersWindows(fullInstallPath)); - return new Installation(fullInstallPath); + return Installation.ofArchive(fullInstallPath); } private static void setupArchiveUsersLinux(Path installPath) { @@ -176,7 +176,6 @@ public class Archives { ).forEach(dir -> assertThat(dir, file(Directory, owner, owner, p755))); assertThat(Files.exists(es.data), is(false)); - assertThat(Files.exists(es.scripts), is(false)); assertThat(es.bin, file(Directory, owner, owner, p755)); assertThat(es.lib, file(Directory, owner, owner, p755)); @@ -209,7 +208,7 @@ public class Archives { "elasticsearch.yml", "jvm.options", "log4j2.properties" - ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); + ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); Stream.of( "NOTICE.txt", @@ -252,7 +251,7 @@ public class Archives { "roles.yml", "role_mapping.yml", "log4j2.properties" - ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); + ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); } public static void runElasticsearch(Installation installation) throws IOException { diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java index 4ff2998988c..fda61e9fb36 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java @@ -27,11 +27,9 @@ import java.util.List; 
import static org.elasticsearch.packaging.util.FileUtils.getTempDir; import static org.elasticsearch.packaging.util.FileUtils.lsGlob; -import static org.elasticsearch.packaging.util.Platforms.isAptGet; import static org.elasticsearch.packaging.util.Platforms.isDPKG; import static org.elasticsearch.packaging.util.Platforms.isRPM; import static org.elasticsearch.packaging.util.Platforms.isSystemd; -import static org.elasticsearch.packaging.util.Platforms.isYUM; public class Cleanup { @@ -100,19 +98,14 @@ public class Cleanup { final Shell sh = new Shell(); if (isRPM()) { - sh.runIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss"); - } - - if (isYUM()) { - sh.runIgnoreExitCode("yum remove -y elasticsearch elasticsearch-oss"); + // Doing rpm erase on both packages in one command will remove neither since both cannot be installed + // this may leave behind config files in /etc/elasticsearch, but a later step in this cleanup will get them + sh.runIgnoreExitCode("rpm --quiet -e elasticsearch"); + sh.runIgnoreExitCode("rpm --quiet -e elasticsearch-oss"); } if (isDPKG()) { sh.runIgnoreExitCode("dpkg --purge elasticsearch elasticsearch-oss"); } - - if (isAptGet()) { - sh.runIgnoreExitCode("apt-get --quiet --yes purge elasticsearch elasticsearch-oss"); - } } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java index 9fdf6d60081..34bae68f97f 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileMatcher.java @@ -47,6 +47,7 @@ public class FileMatcher extends TypeSafeMatcher { public enum Fileness { File, Directory } public static final Set p755 = fromString("rwxr-xr-x"); + public static final Set p750 = fromString("rwxr-x---"); public static final Set p660 = fromString("rw-rw----"); public static final Set p644 = fromString("rw-r--r--"); diff --git 
a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java index 315dc6ffee1..10d1b3ee6b6 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java @@ -33,11 +33,14 @@ import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileOwnerAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsNot.not; import static org.hamcrest.text.IsEmptyString.isEmptyOrNullString; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; /** * Wrappers and convenience methods for common filesystem operations @@ -160,4 +163,20 @@ public class FileUtils { assertThat(fromEnv, not(isEmptyOrNullString())); return Paths.get(fromEnv); } + + public static Path getDistributionFile(Distribution distribution) { + return getDistributionFile(distribution, getCurrentVersion()); + } + + public static Path getDistributionFile(Distribution distribution, String version) { + return getPackagingArchivesDir().resolve(distribution.filename(version)); + } + + public static void assertPathsExist(Path... paths) { + Arrays.stream(paths).forEach(path -> assertTrue(path + " should exist", Files.exists(path))); + } + + public static void assertPathsDontExist(Path... 
paths) { + Arrays.stream(paths).forEach(path -> assertFalse(path + " should not exist", Files.exists(path))); + } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index 68da440400a..40dc546f230 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -20,6 +20,7 @@ package org.elasticsearch.packaging.util; import java.nio.file.Path; +import java.nio.file.Paths; /** * Represents an installation of Elasticsearch @@ -34,9 +35,10 @@ public class Installation { public final Path logs; public final Path plugins; public final Path modules; - public final Path scripts; + public final Path pidDir; + public final Path envFile; - public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path scripts) { + public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path pidDir, Path envFile) { this.home = home; this.bin = home.resolve("bin"); this.lib = home.resolve("lib"); @@ -46,18 +48,38 @@ public class Installation { this.logs = logs; this.plugins = plugins; this.modules = modules; - this.scripts = scripts; + this.pidDir = pidDir; + this.envFile = envFile; } - public Installation(Path home) { - this( + public static Installation ofArchive(Path home) { + return new Installation( home, home.resolve("config"), home.resolve("data"), home.resolve("logs"), home.resolve("plugins"), home.resolve("modules"), - home.resolve("scripts") + null, + null + ); + } + + public static Installation ofPackage(Distribution.Packaging packaging) { + + final Path envFile = (packaging == Distribution.Packaging.RPM) + ? 
Paths.get("/etc/sysconfig/elasticsearch") + : Paths.get("/etc/default/elasticsearch"); + + return new Installation( + Paths.get("/usr/share/elasticsearch"), + Paths.get("/etc/elasticsearch"), + Paths.get("/var/lib/elasticsearch"), + Paths.get("/var/log/elasticsearch"), + Paths.get("/usr/share/elasticsearch/plugins"), + Paths.get("/usr/share/elasticsearch/modules"), + Paths.get("/var/run/elasticsearch"), + envFile ); } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java new file mode 100644 index 00000000000..6e80d9e027d --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java @@ -0,0 +1,259 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.packaging.util; + +import org.elasticsearch.packaging.util.Shell.Result; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import static org.elasticsearch.packaging.util.FileMatcher.Fileness.Directory; +import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; +import static org.elasticsearch.packaging.util.FileMatcher.file; +import static org.elasticsearch.packaging.util.FileMatcher.p644; +import static org.elasticsearch.packaging.util.FileMatcher.p660; +import static org.elasticsearch.packaging.util.FileMatcher.p750; +import static org.elasticsearch.packaging.util.FileMatcher.p755; +import static org.elasticsearch.packaging.util.FileUtils.getCurrentVersion; +import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; +import static org.elasticsearch.packaging.util.Platforms.isSysVInit; +import static org.elasticsearch.packaging.util.Platforms.isSystemd; +import static org.elasticsearch.packaging.util.ServerUtils.waitForElasticsearch; +import static org.hamcrest.CoreMatchers.anyOf; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class Packages { + + public static final Path SYSVINIT_SCRIPT = Paths.get("/etc/init.d/elasticsearch"); + public static final Path SYSTEMD_SERVICE = Paths.get("/usr/lib/systemd/system/elasticsearch.service"); + + public static void assertInstalled(Distribution distribution) { + final Result status = packageStatus(distribution); + assertThat(status.exitCode, is(0)); + + Platforms.onDPKG(() -> assertFalse(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find())); + } + + public static void assertRemoved(Distribution 
distribution) { + final Result status = packageStatus(distribution); + + Platforms.onRPM(() -> assertThat(status.exitCode, is(1))); + + Platforms.onDPKG(() -> { + assertThat(status.exitCode, anyOf(is(0), is(1))); + if (status.exitCode == 0) { + assertTrue(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find()); + } + }); + } + + public static Result packageStatus(Distribution distribution) { + final Shell sh = new Shell(); + final Result result; + + if (distribution.packaging == Distribution.Packaging.RPM) { + result = sh.runIgnoreExitCode("rpm -qe " + distribution.flavor.name); + } else { + result = sh.runIgnoreExitCode("dpkg -s " + distribution.flavor.name); + } + + return result; + } + + public static Installation install(Distribution distribution) { + return install(distribution, getCurrentVersion()); + } + + public static Installation install(Distribution distribution, String version) { + final Shell sh = new Shell(); + final Path distributionFile = getDistributionFile(distribution, version); + + Platforms.onRPM(() -> sh.run("rpm -i " + distributionFile)); + Platforms.onDPKG(() -> sh.run("dpkg -i " + distributionFile)); + + return Installation.ofPackage(distribution.packaging); + } + + public static void remove(Distribution distribution) { + final Shell sh = new Shell(); + + Platforms.onRPM(() -> { + sh.run("rpm -e " + distribution.flavor.name); + final Result status = packageStatus(distribution); + assertThat(status.exitCode, is(1)); + }); + + Platforms.onDPKG(() -> { + sh.run("dpkg -r " + distribution.flavor.name); + final Result status = packageStatus(distribution); + assertThat(status.exitCode, is(0)); + assertTrue(Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find()); + }); + } + + public static void verifyPackageInstallation(Installation installation, Distribution distribution) { + verifyOssInstallation(installation, distribution); + if (distribution.flavor == Distribution.Flavor.DEFAULT) { + 
verifyDefaultInstallation(installation); + } + } + + + private static void verifyOssInstallation(Installation es, Distribution distribution) { + final Shell sh = new Shell(); + + sh.run("id elasticsearch"); + sh.run("getent group elasticsearch"); + + final Result passwdResult = sh.run("getent passwd elasticsearch"); + final Path homeDir = Paths.get(passwdResult.stdout.trim().split(":")[5]); + assertFalse("elasticsearch user home directory must not exist", Files.exists(homeDir)); + + Stream.of( + es.home, + es.plugins, + es.modules + ).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); + + assertThat(es.pidDir, file(Directory, "elasticsearch", "elasticsearch", p755)); + + Stream.of( + es.data, + es.logs + ).forEach(dir -> assertThat(dir, file(Directory, "elasticsearch", "elasticsearch", p750))); + + // we shell out here because java's posix file permission view doesn't support special modes + assertThat(es.config, file(Directory, "root", "elasticsearch", p750)); + assertThat(sh.run("find \"" + es.config + "\" -maxdepth 0 -printf \"%m\"").stdout, containsString("2750")); + + Stream.of( + "elasticsearch.keystore", + "elasticsearch.yml", + "jvm.options", + "log4j2.properties" + ).forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); + assertThat(es.config(".elasticsearch.keystore.initial_md5sum"), file(File, "root", "elasticsearch", p644)); + + assertThat(sh.run("sudo -u elasticsearch " + es.bin("elasticsearch-keystore") + " list").stdout, containsString("keystore.seed")); + + Stream.of( + es.bin, + es.lib + ).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); + + Stream.of( + "elasticsearch", + "elasticsearch-plugin", + "elasticsearch-keystore", + "elasticsearch-translog" + ).forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); + + Stream.of( + "NOTICE.txt", + "README.textile" + ).forEach(doc -> assertThat(es.home.resolve(doc), file(File, 
"root", "root", p644))); + + assertThat(es.envFile, file(File, "root", "elasticsearch", p660)); + + if (distribution.packaging == Distribution.Packaging.RPM) { + assertThat(es.home.resolve("LICENSE.txt"), file(File, "root", "root", p644)); + } else { + Path copyrightDir = Paths.get(sh.run("readlink -f /usr/share/doc/" + distribution.flavor.name).stdout.trim()); + assertThat(copyrightDir, file(Directory, "root", "root", p755)); + assertThat(copyrightDir.resolve("copyright"), file(File, "root", "root", p644)); + } + + if (isSystemd()) { + Stream.of( + SYSTEMD_SERVICE, + Paths.get("/usr/lib/tmpfiles.d/elasticsearch.conf"), + Paths.get("/usr/lib/sysctl.d/elasticsearch.conf") + ).forEach(confFile -> assertThat(confFile, file(File, "root", "root", p644))); + + final String sysctlExecutable = (distribution.packaging == Distribution.Packaging.RPM) + ? "/usr/sbin/sysctl" + : "/sbin/sysctl"; + assertThat(sh.run(sysctlExecutable + " vm.max_map_count").stdout, containsString("vm.max_map_count = 262144")); + } + + if (isSysVInit()) { + assertThat(SYSVINIT_SCRIPT, file(File, "root", "root", p750)); + } + } + + private static void verifyDefaultInstallation(Installation es) { + + Stream.of( + "elasticsearch-certgen", + "elasticsearch-certutil", + "elasticsearch-croneval", + "elasticsearch-migrate", + "elasticsearch-saml-metadata", + "elasticsearch-setup-passwords", + "elasticsearch-sql-cli", + "elasticsearch-syskeygen", + "elasticsearch-users", + "x-pack-env", + "x-pack-security-env", + "x-pack-watcher-env" + ).forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); + + // at this time we only install the current version of archive distributions, but if that changes we'll need to pass + // the version through here + assertThat(es.bin("elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, "root", "root", p755)); + + Stream.of( + "users", + "users_roles", + "roles.yml", + "role_mapping.yml", + "log4j2.properties" + ).forEach(configFile 
-> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); + } + + public static void startElasticsearch() throws IOException { + final Shell sh = new Shell(); + if (isSystemd()) { + sh.run("systemctl daemon-reload"); + sh.run("systemctl enable elasticsearch.service"); + sh.run("systemctl is-enabled elasticsearch.service"); + sh.run("systemctl start elasticsearch.service"); + } else { + sh.run("service elasticsearch start"); + } + + waitForElasticsearch(); + + if (isSystemd()) { + sh.run("systemctl is-active elasticsearch.service"); + sh.run("systemctl status elasticsearch.service"); + } else { + sh.run("service elasticsearch status"); + } + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java index 5ffbc318200..c7ca1284ca6 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java @@ -19,11 +19,23 @@ package org.elasticsearch.packaging.util; +import java.nio.file.Paths; + +import static org.elasticsearch.packaging.util.FileUtils.slurp; + public class Platforms { public static final String OS_NAME = System.getProperty("os.name"); public static final boolean LINUX = OS_NAME.startsWith("Linux"); public static final boolean WINDOWS = OS_NAME.startsWith("Windows"); + public static String getOsRelease() { + if (LINUX) { + return slurp(Paths.get("/etc/os-release")); + } else { + throw new RuntimeException("os-release is only supported on linux"); + } + } + public static boolean isDPKG() { if (WINDOWS) { return false; @@ -31,13 +43,6 @@ public class Platforms { return new Shell().runIgnoreExitCode("which dpkg").isSuccess(); } - public static boolean isAptGet() { - if (WINDOWS) { - return false; - } - return new Shell().runIgnoreExitCode("which apt-get").isSuccess(); - } - public static boolean isRPM() { if (WINDOWS) { return false; @@ 
-45,13 +50,6 @@ public class Platforms { return new Shell().runIgnoreExitCode("which rpm").isSuccess(); } - public static boolean isYUM() { - if (WINDOWS) { - return false; - } - return new Shell().runIgnoreExitCode("which yum").isSuccess(); - } - public static boolean isSystemd() { if (WINDOWS) { return false; @@ -78,6 +76,18 @@ public class Platforms { } } + public static void onRPM(PlatformAction action) { + if (isRPM()) { + action.run(); + } + } + + public static void onDPKG(PlatformAction action) { + if (isDPKG()) { + action.run(); + } + } + /** * Essentially a Runnable, but we make the distinction so it's more clear that these are synchronous */ diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java index ff006a34e68..6331b4bf46e 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -72,7 +72,7 @@ public class ServerUtils { } catch (HttpHostConnectException e) { // we want to retry if the connection is refused - LOG.info("Got connection refused when waiting for cluster health", e); + LOG.debug("Got connection refused when waiting for cluster health", e); } timeElapsed = System.currentTimeMillis() - startTime; diff --git a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats deleted file mode 100644 index 749c72c8b31..00000000000 --- a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats +++ /dev/null @@ -1,233 +0,0 @@ -#!/usr/bin/env bats - -# This file is used to test the installation and removal -# of a Debian package. - -# WARNING: This testing file must be executed as root and can -# dramatically change your system. 
It should only be executed -# in a throw-away VM like those made by the Vagrantfile at -# the root of the Elasticsearch source code. This should -# cause the script to fail if it is executed any other way: -[ -f /etc/is_vagrant_vm ] || { - >&2 echo "must be run on a vagrant VM" - exit 1 -} - -# The test case can be executed with the Bash Automated -# Testing System tool available at https://github.com/sstephenson/bats -# Thanks to Sam Stephenson! - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -# Load test utilities -load $BATS_UTILS/utils.bash -load $BATS_UTILS/packages.bash -load $BATS_UTILS/plugins.bash - -# Cleans everything for the 1st execution -setup() { - skip_not_dpkg - export_elasticsearch_paths -} - -@test "[DEB] package depends on bash" { - dpkg -I elasticsearch-oss-$(cat version).deb | grep "Depends:.*bash.*" -} - -@test "[DEB] package conflicts" { - dpkg -I elasticsearch-oss-$(cat version).deb | grep "^ Conflicts: elasticsearch$" - dpkg -I elasticsearch-$(cat version).deb | grep "^ Conflicts: elasticsearch-oss$" -} - -################################## -# Install DEB package -################################## -@test "[DEB] dpkg command is available" { - clean_before_test - dpkg --version -} - -@test "[DEB] package is available" { - count=$(ls elasticsearch-oss-$(cat version).deb | wc -l) - [ "$count" -eq 1 ] -} - -@test "[DEB] package is not installed" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[DEB] temporarily remove java and ensure the install fails" { - move_java - run dpkg -i elasticsearch-oss-$(cat version).deb - output=$status - unmove_java - [ "$output" -eq 1 ] -} - -@test "[DEB] install package" { - dpkg -i elasticsearch-oss-$(cat version).deb -} - -@test "[DEB] package is installed" { - dpkg -s 'elasticsearch-oss' -} - -@test "[DEB] verify package installation" { - verify_package_installation -} - -@test "[DEB] verify elasticsearch-plugin list runs without any plugins installed" { - local plugins_list=`$ESHOME/bin/elasticsearch-plugin list` - [[ -z $plugins_list ]] -} - -@test "[DEB] elasticsearch isn't started by package install" { - # Wait a second to give Elasticsearch a change to start if it is going to. - # This isn't perfect by any means but its something. - sleep 1 - ! ps aux | grep elasticsearch | grep java - # You might be tempted to use jps instead of the above but that'd have to - # look like: - # ! 
sudo -u elasticsearch jps | grep -i elasticsearch - # which isn't really easier to read than the above. -} - -@test "[DEB] test elasticsearch" { - start_elasticsearch_service - run_elasticsearch_tests -} - -@test "[DEB] verify package installation after start" { - # Checks that the startup scripts didn't change the permissions - verify_package_installation -} - -################################## -# Uninstall DEB package -################################## -@test "[DEB] remove package" { - dpkg -r 'elasticsearch-oss' -} - -@test "[DEB] package has been removed" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 0 ] - echo "$output" | grep -i "status" | grep -i "deinstall ok" -} - -@test "[DEB] verify package removal" { - # The removal must stop the service - count=$(ps | grep Elasticsearch | wc -l) - [ "$count" -eq 0 ] - - # The removal must disable the service - # see prerm file - if is_systemd; then - missing_exit_code=4 - if [ $(systemctl --version | head -1 | awk '{print $2}') -lt 231 ]; then - # systemd before version 231 used exit code 3 when the service did not exist - missing_exit_code=3 - fi - run systemctl status elasticsearch.service - [ "$status" -eq $missing_exit_code ] - - run systemctl is-enabled elasticsearch.service - [ "$status" -eq 1 ] - fi - - # Those directories are deleted when removing the package - # see postrm file - assert_file_not_exist "/var/log/elasticsearch" - assert_file_not_exist "/usr/share/elasticsearch/plugins" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/var/run/elasticsearch" - - # Those directories are removed by the package manager - assert_file_not_exist "/usr/share/elasticsearch/bin" - assert_file_not_exist "/usr/share/elasticsearch/lib" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" - - # The configuration files are still here - assert_file_exist "/etc/elasticsearch" - # TODO: use ucf to handle 
these better for Debian-based systems - assert_file_not_exist "/etc/elasticsearch/elasticsearch.keystore" - assert_file_not_exist "/etc/elasticsearch/.elasticsearch.keystore.initial_md5sum" - assert_file_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_exist "/etc/elasticsearch/jvm.options" - assert_file_exist "/etc/elasticsearch/log4j2.properties" - - # The env file is still here - assert_file_exist "/etc/default/elasticsearch" - - # The service files are still here - assert_file_exist "/etc/init.d/elasticsearch" -} - -@test "[DEB] purge package" { - # User installed scripts aren't removed so we'll just get them ourselves - rm -rf $ESSCRIPTS - dpkg --purge 'elasticsearch-oss' -} - -@test "[DEB] verify package purge" { - # all remaining files are deleted by the purge - assert_file_not_exist "/etc/elasticsearch" - assert_file_not_exist "/etc/elasticsearch/elasticsearch.keystore" - assert_file_not_exist "/etc/elasticsearch/.elasticsearch.keystore.initial_md5sum" - assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_not_exist "/etc/elasticsearch/jvm.options" - assert_file_not_exist "/etc/elasticsearch/log4j2.properties" - - assert_file_not_exist "/etc/default/elasticsearch" - - assert_file_not_exist "/etc/init.d/elasticsearch" - assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" - - assert_file_not_exist "/usr/share/elasticsearch" - - assert_file_not_exist "/usr/share/doc/elasticsearch-oss" - assert_file_not_exist "/usr/share/doc/elasticsearch-oss/copyright" -} - -@test "[DEB] package has been completly removed" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[DEB] reinstall package" { - dpkg -i elasticsearch-oss-$(cat version).deb -} - -@test "[DEB] package is installed by reinstall" { - dpkg -s 'elasticsearch-oss' -} - -@test "[DEB] verify package reinstallation" { - verify_package_installation -} - -@test "[DEB] repurge package" { - dpkg --purge 'elasticsearch-oss' -} - -@test "[DEB] package 
has been completly removed again" { - run dpkg -s 'elasticsearch-oss' - [ "$status" -eq 1 ] -} diff --git a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats deleted file mode 100644 index cb12d4b50e0..00000000000 --- a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats +++ /dev/null @@ -1,220 +0,0 @@ -#!/usr/bin/env bats - -# This file is used to test the installation of a RPM package. - -# WARNING: This testing file must be executed as root and can -# dramatically change your system. It should only be executed -# in a throw-away VM like those made by the Vagrantfile at -# the root of the Elasticsearch source code. This should -# cause the script to fail if it is executed any other way: -[ -f /etc/is_vagrant_vm ] || { - >&2 echo "must be run on a vagrant VM" - exit 1 -} - -# The test case can be executed with the Bash Automated -# Testing System tool available at https://github.com/sstephenson/bats -# Thanks to Sam Stephenson! - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -# Load test utilities -load $BATS_UTILS/utils.bash -load $BATS_UTILS/packages.bash -load $BATS_UTILS/plugins.bash - -# Cleans everything for the 1st execution -setup() { - skip_not_rpm - export_elasticsearch_paths -} - -@test "[RPM] package depends on bash" { - rpm -qpR elasticsearch-oss-$(cat version).rpm | grep '/bin/bash' -} - -@test "[RPM] package conflicts" { - rpm -qp --conflicts elasticsearch-oss-$(cat version).rpm | grep "^elasticsearch\s*$" - rpm -qp --conflicts elasticsearch-$(cat version).rpm | grep "^elasticsearch-oss\s*$" -} - -################################## -# Install RPM package -################################## -@test "[RPM] rpm command is available" { - clean_before_test - rpm --version -} - -@test "[RPM] package is available" { - count=$(ls elasticsearch-oss-$(cat version).rpm | wc -l) - [ "$count" -eq 1 ] -} - -@test "[RPM] package is not installed" { - run rpm -qe 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[RPM] temporarily remove java and ensure the install fails" { - move_java - run rpm -i elasticsearch-oss-$(cat version).rpm - output=$status - unmove_java - [ "$output" -eq 1 ] -} - -@test "[RPM] install package" { - rpm -i elasticsearch-oss-$(cat version).rpm -} - -@test "[RPM] package is installed" { - rpm -qe 'elasticsearch-oss' -} - -@test "[RPM] verify package installation" { - verify_package_installation -} - -@test "[RPM] verify elasticsearch-plugin list runs without any plugins installed" { - local plugins_list=`$ESHOME/bin/elasticsearch-plugin list` - [[ -z $plugins_list ]] -} - -@test "[RPM] elasticsearch isn't started by package install" { - # Wait a second to give Elasticsearch a change to start if it is going to. - # This isn't perfect by any means but its something. - sleep 1 - ! 
ps aux | grep elasticsearch | grep java -} - -@test "[RPM] test elasticsearch" { - start_elasticsearch_service - run_elasticsearch_tests -} - -@test "[RPM] verify package installation after start" { - # Checks that the startup scripts didn't change the permissions - verify_package_installation -} - -@test "[RPM] remove package" { - # User installed scripts aren't removed so we'll just get them ourselves - rm -rf $ESSCRIPTS - rpm -e 'elasticsearch-oss' -} - -@test "[RPM] package has been removed" { - run rpm -qe 'elasticsearch-oss' - [ "$status" -eq 1 ] -} - -@test "[RPM] verify package removal" { - # The removal must stop the service - count=$(ps | grep Elasticsearch | wc -l) - [ "$count" -eq 0 ] - - # The removal must disable the service - # see prerm file - if is_systemd; then - run systemctl is-enabled elasticsearch.service - [ "$status" -eq 1 ] - fi - - # Those directories are deleted when removing the package - # see postrm file - assert_file_not_exist "/var/log/elasticsearch" - assert_file_not_exist "/usr/share/elasticsearch/plugins" - assert_file_not_exist "/var/run/elasticsearch" - - # Those directories are removed by the package manager - assert_file_not_exist "/usr/share/elasticsearch/bin" - assert_file_not_exist "/usr/share/elasticsearch/lib" - assert_file_not_exist "/usr/share/elasticsearch/modules" - - assert_file_not_exist "/etc/elasticsearch" - - assert_file_not_exist "/etc/init.d/elasticsearch" - assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" - - assert_file_not_exist "/etc/sysconfig/elasticsearch" -} - -@test "[RPM] reinstall package" { - rpm -i elasticsearch-oss-$(cat version).rpm -} - -@test "[RPM] package is installed by reinstall" { - rpm -qe 'elasticsearch-oss' -} - -@test "[RPM] verify package reinstallation" { - verify_package_installation -} - -@test "[RPM] reremove package" { - echo foobar | "$ESHOME/bin/elasticsearch-keystore" add --stdin foo.bar - echo "# ping" >> "/etc/elasticsearch/elasticsearch.yml" - echo "# 
ping" >> "/etc/elasticsearch/jvm.options" - echo "# ping" >> "/etc/elasticsearch/log4j2.properties" - rpm -e 'elasticsearch-oss' -} - -@test "[RPM] verify preservation" { - # The removal must disable the service - # see prerm file - if is_systemd; then - run systemctl is-enabled elasticsearch.service - [ "$status" -eq 1 ] - fi - - # Those directories are deleted when removing the package - # see postrm file - assert_file_not_exist "/var/log/elasticsearch" - assert_file_not_exist "/usr/share/elasticsearch/plugins" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/var/run/elasticsearch" - - assert_file_not_exist "/usr/share/elasticsearch/bin" - assert_file_not_exist "/usr/share/elasticsearch/lib" - assert_file_not_exist "/usr/share/elasticsearch/modules" - assert_file_not_exist "/usr/share/elasticsearch/modules/lang-painless" - - assert_file_exist "/etc/elasticsearch/elasticsearch.keystore" - assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml" - assert_file_exist "/etc/elasticsearch/elasticsearch.yml.rpmsave" - assert_file_not_exist "/etc/elasticsearch/jvm.options" - assert_file_exist "/etc/elasticsearch/jvm.options.rpmsave" - assert_file_not_exist "/etc/elasticsearch/log4j2.properties" - assert_file_exist "/etc/elasticsearch/log4j2.properties.rpmsave" - - assert_file_not_exist "/etc/init.d/elasticsearch" - assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service" - - assert_file_not_exist "/etc/sysconfig/elasticsearch" -} - -@test "[RPM] finalize package removal" { - # cleanup - rm -rf /etc/elasticsearch -} - -@test "[RPM] package has been removed again" { - run rpm -qe 'elasticsearch-oss' - [ "$status" -eq 1 ] -} From 38e2e1d553280de0f49ed9bb5a2c7df4187d4ed5 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 19 Jul 2018 06:46:58 +0000 Subject: [PATCH 094/260] Detect and prevent configuration that triggers a Gradle bug (#31912) * Detect and prevent configuration that triggers a Gradle bug As we found in 
#31862, this can lead to a lot of wasted time as it's not immediatly obvius what's going on. Givent how many projects we have it's getting increasingly easier to run into gradle/gradle#847. --- build.gradle | 17 ++++++++++++++++- client/test/build.gradle | 2 ++ libs/cli/build.gradle | 10 ---------- plugins/discovery-ec2/qa/build.gradle | 1 + plugins/repository-azure/qa/build.gradle | 1 + plugins/repository-gcs/qa/build.gradle | 1 + settings.gradle | 2 ++ .../with-system-key/build.gradle | 1 + .../without-system-key/build.gradle | 1 + x-pack/qa/sql/security/build.gradle | 2 ++ 10 files changed, 27 insertions(+), 11 deletions(-) diff --git a/build.gradle b/build.gradle index 90a9d88ac8b..66f34d8f445 100644 --- a/build.gradle +++ b/build.gradle @@ -222,7 +222,7 @@ subprojects { "org.elasticsearch.gradle:build-tools:${version}": ':build-tools', "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec', "org.elasticsearch:elasticsearch:${version}": ':server', - "org.elasticsearch:elasticsearch-cli:${version}": ':libs:cli', + "org.elasticsearch:elasticsearch-cli:${version}": ':libs:elasticsearch-cli', "org.elasticsearch:elasticsearch-core:${version}": ':libs:core', "org.elasticsearch:elasticsearch-nio:${version}": ':libs:nio', "org.elasticsearch:elasticsearch-x-content:${version}": ':libs:x-content', @@ -622,6 +622,21 @@ gradle.projectsEvaluated { } } } + // Having the same group and name for distinct projects causes Gradle to consider them equal when resolving + // dependencies leading to hard to debug failures. Run a check across all project to prevent this from happening. + // see: https://github.com/gradle/gradle/issues/847 + Map coordsToProject = [:] + project.allprojects.forEach { p -> + String coords = "${p.group}:${p.name}" + if (false == coordsToProject.putIfAbsent(coords, p)) { + throw new GradleException( + "Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " + + "have the same name and group: ${coords}. 
" + + "This doesn't currently work correctly in Gradle, see: " + + "https://github.com/gradle/gradle/issues/847" + ) + } + } } if (System.properties.get("build.compare") != null) { diff --git a/client/test/build.gradle b/client/test/build.gradle index 59c45186fe7..cc69a1828dc 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -25,6 +25,8 @@ apply plugin: 'elasticsearch.build' targetCompatibility = JavaVersion.VERSION_1_7 sourceCompatibility = JavaVersion.VERSION_1_7 +group = "${group}.client.test" + dependencies { compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index 91fbca19eca..00d6d96ef0d 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -24,16 +24,6 @@ apply plugin: 'nebula.optional-base' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -publishing { - publications { - nebula { - artifactId 'elasticsearch-cli' - } - } -} - -archivesBaseName = 'elasticsearch-cli' - dependencies { compile 'net.sf.jopt-simple:jopt-simple:5.0.2' compile "org.elasticsearch:elasticsearch-core:${version}" diff --git a/plugins/discovery-ec2/qa/build.gradle b/plugins/discovery-ec2/qa/build.gradle index e69de29bb2d..0aed6df8838 100644 --- a/plugins/discovery-ec2/qa/build.gradle +++ b/plugins/discovery-ec2/qa/build.gradle @@ -0,0 +1 @@ +group = "${group}.plugins.discovery-ec2.qa" diff --git a/plugins/repository-azure/qa/build.gradle b/plugins/repository-azure/qa/build.gradle index e69de29bb2d..5c25485a8f5 100644 --- a/plugins/repository-azure/qa/build.gradle +++ b/plugins/repository-azure/qa/build.gradle @@ -0,0 +1 @@ +group = "${group}.plugins.repository-azure.qa" diff --git a/plugins/repository-gcs/qa/build.gradle b/plugins/repository-gcs/qa/build.gradle index e69de29bb2d..d10d9050dfe 100644 --- a/plugins/repository-gcs/qa/build.gradle +++ 
b/plugins/repository-gcs/qa/build.gradle @@ -0,0 +1 @@ +group = "${group}.plugins.repository-gcs.qa" diff --git a/settings.gradle b/settings.gradle index 5904cc4daf4..bdd866e622b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -131,3 +131,5 @@ if (extraProjects.exists()) { // enable in preparation for Gradle 5.0 enableFeaturePreview('STABLE_PUBLISHING') + +project(":libs:cli").name = 'elasticsearch-cli' diff --git a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle index e69de29bb2d..03505e01ded 100644 --- a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle +++ b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle @@ -0,0 +1 @@ +group = "${group}.x-pack.qa.rolling-upgrade.with-system-key" diff --git a/x-pack/qa/rolling-upgrade/without-system-key/build.gradle b/x-pack/qa/rolling-upgrade/without-system-key/build.gradle index e69de29bb2d..aa7ac502eb3 100644 --- a/x-pack/qa/rolling-upgrade/without-system-key/build.gradle +++ b/x-pack/qa/rolling-upgrade/without-system-key/build.gradle @@ -0,0 +1 @@ +group = "${group}.x-pack.qa.rolling-upgrade.without-system-key" diff --git a/x-pack/qa/sql/security/build.gradle b/x-pack/qa/sql/security/build.gradle index 5c3169d9d20..15f7734f942 100644 --- a/x-pack/qa/sql/security/build.gradle +++ b/x-pack/qa/sql/security/build.gradle @@ -4,6 +4,8 @@ dependencies { Project mainProject = project +group = "${group}.x-pack.qa.sql.security" + subprojects { // Use resources from the parent project in subprojects sourceSets { From 644a92f1582ceeda6981c0653a5e455f5ec43e3c Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 19 Jul 2018 09:34:23 +0200 Subject: [PATCH 095/260] Fix rollup on date fields that don't support epoch_millis (#31890) The rollup indexer uses a range query to select the next page of results based on the last time bucket of the previous round and the `delay` configured on the rollup job. 
This query uses the `epoch_millis` format implicitly but doesn't set the `format`. This result in errors during the rollup job if the field definition doesn't allow this format. It can also miss documents if the format is not accepted but another format in the field definition is able to parse the query (e.g.: `epoch_second`). This change ensures that we use `epoch_millis` as the only format to parse the rollup range query. --- .../xpack/rollup/job/RollupIndexer.java | 3 +- .../job/RollupIndexerIndexingTests.java | 5 +- .../elasticsearch/multi_node/RollupIT.java | 48 ++++++++++++++----- 3 files changed, 43 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 1711c0e34eb..c7d29451ab3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -425,7 +425,8 @@ public abstract class RollupIndexer { assert lowerBound <= maxBoundary; final RangeQueryBuilder query = new RangeQueryBuilder(fieldName) .gte(lowerBound) - .lt(maxBoundary); + .lt(maxBoundary) + .format("epoch_millis"); return query; } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index bf4f4892ef6..f658fa574eb 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -29,6 +29,8 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import 
org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -506,6 +508,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { private Map createFieldTypes(RollupJobConfig job) { Map fieldTypes = new HashMap<>(); MappedFieldType fieldType = new DateFieldMapper.Builder(job.getGroupConfig().getDateHisto().getField()) + .dateTimeFormatter(Joda.forPattern(randomFrom("basic_date", "date_optional_time", "epoch_second"))) .build(new Mapper.BuilderContext(settings.getSettings(), new ContentPath(0))) .fieldType(); fieldTypes.put(fieldType.name(), fieldType); @@ -618,7 +621,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { RangeQueryBuilder range = (RangeQueryBuilder) request.source().query(); final DateTimeZone timeZone = range.timeZone() != null ? 
DateTimeZone.forID(range.timeZone()) : null; Query query = timestampField.rangeQuery(range.from(), range.to(), range.includeLower(), range.includeUpper(), - null, timeZone, null, queryShardContext); + null, timeZone, new DateMathParser(Joda.forPattern(range.format())), queryShardContext); // extract composite agg assertThat(request.source().aggregations().getAggregatorFactories().size(), equalTo(1)); diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index b0142ae1418..43ad4dc0a45 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -6,12 +6,16 @@ package org.elasticsearch.multi_node; import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -33,8 +37,8 @@ import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isOneOf; 
@@ -73,6 +77,31 @@ public class RollupIT extends ESRestTestCase { public void testBigRollup() throws Exception { final int numDocs = 200; + String dateFormat = "strict_date_optional_time"; + + // create the test-index index + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + { + builder.startObject("mappings").startObject("_doc") + .startObject("properties") + .startObject("timestamp") + .field("type", "date") + .field("format", dateFormat) + .endObject() + .startObject("value") + .field("type", "integer") + .endObject() + .endObject() + .endObject().endObject(); + } + builder.endObject(); + final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + Request req = new Request("PUT", "rollup-docs"); + req.setEntity(entity); + client().performRequest(req); + } + // index documents for the rollup job final StringBuilder bulk = new StringBuilder(); @@ -88,13 +117,15 @@ public class RollupIT extends ESRestTestCase { bulkRequest.addParameter("refresh", "true"); bulkRequest.setJsonEntity(bulk.toString()); client().performRequest(bulkRequest); + // create the rollup job final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test"); + int pageSize = randomIntBetween(2, 50); createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"rollup-*\"," + "\"rollup_index\":\"results-rollup\"," - + "\"cron\":\"*/1 * * * * ?\"," // fast cron and big page size so test runs quickly - + "\"page_size\":20," + + "\"cron\":\"*/1 * * * * ?\"," // fast cron so test runs quickly + + "\"page_size\":" + pageSize + "," + "\"groups\":{" + " \"date_histogram\":{" + " \"field\":\"timestamp\"," @@ -142,7 +173,8 @@ public class RollupIT extends ESRestTestCase { " \"date_histo\": {\n" + " \"date_histogram\": {\n" + " \"field\": \"timestamp\",\n" + - " \"interval\": \"1h\"\n" + + " \"interval\": \"1h\",\n" + + " \"format\": \"date_time\"\n" + " },\n" + " \"aggs\": {\n" + " \"the_max\": {\n" + @@ 
-226,7 +258,7 @@ public class RollupIT extends ESRestTestCase { } - private void waitForRollUpJob(final String rollupJob,String[] expectedStates) throws Exception { + private void waitForRollUpJob(final String rollupJob, String[] expectedStates) throws Exception { assertBusy(() -> { final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); @@ -317,10 +349,4 @@ public class RollupIT extends ESRestTestCase { } } } - - private static String responseEntityToString(Response response) throws Exception { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { - return reader.lines().collect(Collectors.joining("\n")); - } - } } From f232c36c197d7d2e470823e672f66e85e4e1e20c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Jul 2018 09:41:32 +0200 Subject: [PATCH 096/260] Fix comments causing errors with Java 11 --- .../aggregations/bucket/nested/NestedAggregatorTests.java | 2 +- .../bucket/nested/ReverseNestedAggregatorTests.java | 2 +- .../bucket/significant/SignificantTermsAggregatorTests.java | 2 +- .../bucket/significant/SignificantTextAggregatorTests.java | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 4555809bad9..c893e595964 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -91,7 +91,7 @@ public class NestedAggregatorTests extends AggregatorTestCase { private final SeqNoFieldMapper.SequenceIDFields sequenceIDFields = 
SeqNoFieldMapper.SequenceIDFields.emptySeqID(); /** - * For each provided field type, we also register an alias with name -alias. + * For each provided field type, we also register an alias with name {@code -alias}. */ @Override protected Map getFieldAliases(MappedFieldType... fieldTypes) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index bc870bf4dca..99322af2264 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -59,7 +59,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase { private static final String MAX_AGG_NAME = "maxAgg"; /** - * For each provided field type, we also register an alias with name -alias. + * For each provided field type, we also register an alias with name {@code -alias}. */ @Override protected Map getFieldAliases(MappedFieldType... fieldTypes) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java index 70f9667ce7b..0485d4f5855 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorTests.java @@ -77,7 +77,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase { } /** - * For each provided field type, we also register an alias with name -alias. + * For each provided field type, we also register an alias with name {@code -alias}. 
*/ @Override protected Map getFieldAliases(MappedFieldType... fieldTypes) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java index c63d5cb7d39..dbff6daed62 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java @@ -53,7 +53,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.signific public class SignificantTextAggregatorTests extends AggregatorTestCase { /** - * For each provided field type, we also register an alias with name -alias. + * For each provided field type, we also register an alias with name {@code -alias}. */ @Override protected Map getFieldAliases(MappedFieldType... fieldTypes) { From 185689ad75955522872acfc7cf21170036680fea Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 19 Jul 2018 01:24:49 -0700 Subject: [PATCH 097/260] Build: Remove pom generation for plugin zip files (#32180) In 1.x and 2.x, plugins were published to maven and the plugin installer downloaded them from there. This was later changed to install from the download service, and in 5.0 plugin zips were no longer published to maven. However, the build still currently produces an unused pom file. This is troublesome in the special case when the main jar of a plugin needs to be published (and thus needs a pom file of the same name). 
closes #31946 --- .../gradle/plugin/PluginBuildPlugin.groovy | 36 ++++--------------- 1 file changed, 7 insertions(+), 29 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index d76084bf22e..7f6f337e8a9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -75,10 +75,10 @@ public class PluginBuildPlugin extends BuildPlugin { // and generate a different pom for the zip addClientJarPomGeneration(project) addClientJarTask(project) - } else { - // no client plugin, so use the pom file from nebula, without jar, for the zip - project.ext.set("nebulaPublish.maven.jar", false) } + // while the jar isn't normally published, we still at least build a pom of deps + // in case it is published, for instance when other plugins extend this plugin + configureJarPom(project) project.integTestCluster.dependsOn(project.bundlePlugin) project.tasks.run.dependsOn(project.bundlePlugin) @@ -94,7 +94,6 @@ public class PluginBuildPlugin extends BuildPlugin { } if (isModule == false || isXPackModule) { - addZipPomGeneration(project) addNoticeGeneration(project) } @@ -239,36 +238,15 @@ public class PluginBuildPlugin extends BuildPlugin { } } - /** Adds a task to generate a pom file for the zip distribution. */ - public static void addZipPomGeneration(Project project) { + /** Configure the pom for the main jar of this plugin */ + protected static void configureJarPom(Project project) { project.plugins.apply(ScmInfoPlugin.class) project.plugins.apply(MavenPublishPlugin.class) project.publishing { publications { - zip(MavenPublication) { - artifact project.bundlePlugin - } - /* HUGE HACK: the underlying maven publication library refuses to deploy any attached artifacts - * when the packaging type is set to 'pom'. 
But Sonatype's OSS repositories require source files - * for artifacts that are of type 'zip'. We already publish the source and javadoc for Elasticsearch - * under the various other subprojects. So here we create another publication using the same - * name that has the "real" pom, and rely on the fact that gradle will execute the publish tasks - * in alphabetical order. This lets us publish the zip file and even though the pom says the - * type is 'pom' instead of 'zip'. We cannot setup a dependency between the tasks because the - * publishing tasks are created *extremely* late in the configuration phase, so that we cannot get - * ahold of the actual task. Furthermore, this entire hack only exists so we can make publishing to - * maven local work, since we publish to maven central externally. */ - zipReal(MavenPublication) { - artifactId = project.pluginProperties.extension.name - pom.withXml { XmlProvider xml -> - Node root = xml.asNode() - root.appendNode('name', project.pluginProperties.extension.name) - root.appendNode('description', project.pluginProperties.extension.description) - root.appendNode('url', urlFromOrigin(project.scminfo.origin)) - Node scmNode = root.appendNode('scm') - scmNode.appendNode('url', project.scminfo.origin) - } + nebula(MavenPublication) { + artifactId project.pluginProperties.extension.name } } } From 202894b8322571c447bc20a6262fd49da89f9bf5 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Thu, 19 Jul 2018 10:40:54 +0200 Subject: [PATCH 098/260] Rest HL client: Add put watch action (#32026) Relates #29827 This implementation behaves like the current transport client, that you basically cannot configure a Watch POJO representation as an argument to the put watch API, but only a bytes reference. You can use the the `WatchSourceBuilder` from the `org.elasticsearch.plugin:x-pack-core` dependency to build watches. This commit also changes the license type to trial, so that watcher is available in high level rest client tests. 
/cc @hub-cap --- client/rest-high-level/build.gradle | 4 + .../client/RequestConverters.java | 20 +++ .../elasticsearch/client/WatcherClient.java | 64 +++++++++ .../org/elasticsearch/client/XPackClient.java | 7 + .../elasticsearch/client/PingAndInfoIT.java | 8 +- .../client/RequestConvertersTests.java | 33 ++++- .../client/RestHighLevelClientTests.java | 4 +- .../org/elasticsearch/client/WatcherIT.java | 46 +++++++ .../MiscellaneousDocumentationIT.java | 5 +- .../documentation/WatcherDocumentationIT.java | 92 +++++++++++++ .../high-level/supported-apis.asciidoc | 3 +- .../x-pack/watcher/put-watch.asciidoc | 55 ++++++++ .../x-pack-info.asciidoc | 0 .../x-pack-usage.asciidoc | 0 .../core/watcher/client/WatcherClient.java | 4 +- .../transport/actions/put/PutWatchAction.java | 1 + .../actions/put/PutWatchRequestBuilder.java | 4 +- .../actions/put/PutWatchResponse.java | 59 --------- .../exporter/local/LocalExporter.java | 4 +- .../rest/action/RestPutWatchAction.java | 4 +- .../actions/put/TransportPutWatchAction.java | 4 +- .../actions/ActionErrorIntegrationTests.java | 2 +- .../actions/TimeThrottleIntegrationTests.java | 2 +- .../throttler/ActionThrottleTests.java | 2 +- .../history/HistoryActionConditionTests.java | 2 +- .../HistoryTemplateEmailMappingsTests.java | 2 +- .../HistoryTemplateHttpMappingsTests.java | 2 +- ...storyTemplateIndexActionMappingsTests.java | 2 +- ...storyTemplateSearchInputMappingsTests.java | 2 +- .../HistoryTemplateTimeMappingsTests.java | 2 +- .../WatcherExecutorServiceBenchmark.java | 2 +- .../test/integration/BasicWatcherTests.java | 2 +- .../ExecutionVarsIntegrationTests.java | 2 +- .../integration/HistoryIntegrationTests.java | 2 +- .../test/integration/WatchAckTests.java | 2 +- .../transform/TransformIntegrationTests.java | 2 +- .../action/WatchRequestValidationTests.java | 2 +- .../action/activate/ActivateWatchTests.java | 4 +- .../action/delete/DeleteWatchTests.java | 2 +- .../action/execute/ExecuteWatchTests.java | 2 +- 
.../transport/action/get/GetWatchTests.java | 2 +- .../put/PutWatchSerializationTests.java | 2 +- .../put/TransportPutWatchActionTests.java | 4 +- .../xpack/watcher}/PutWatchRequest.java | 56 ++++---- .../xpack/watcher/PutWatchResponse.java | 124 ++++++++++++++++++ .../xpack/watcher/PutWatchResponseTests.java | 45 +++++++ 46 files changed, 570 insertions(+), 124 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java create mode 100644 docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc rename docs/java-rest/high-level/{miscellaneous => x-pack}/x-pack-info.asciidoc (100%) rename docs/java-rest/high-level/{miscellaneous => x-pack}/x-pack-usage.asciidoc (100%) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchResponse.java rename x-pack/{plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put => protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher}/PutWatchRequest.java (73%) create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java create mode 100644 x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index a1260894bf7..65c5d094c71 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -70,3 +70,7 @@ forbiddenApisMain { signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] } + +integTestCluster { + setting 
'xpack.license.self_generated.type', 'trial' +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 9dbd4916c77..a6122b0681e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -106,6 +106,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; @@ -1097,6 +1098,25 @@ final class RequestConverters { return request; } + static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") + .addPathPart(putWatchRequest.getId()) + .build(); + + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + Params params = new Params(request).withVersion(putWatchRequest.getVersion()); + if (putWatchRequest.isActive() == false) { + params.putParam("active", "false"); + } + ContentType contentType = createContentType(putWatchRequest.xContentType()); + BytesReference source = putWatchRequest.getSource(); + request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType)); + return request; + } + static Request xpackUsage(XPackUsageRequest usageRequest) { Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); Params parameters = new Params(request); diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java new file mode 100644 index 00000000000..73c92ba5c45 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; + +import java.io.IOException; + +import static java.util.Collections.emptySet; + +public final class WatcherClient { + + private final RestHighLevelClient restHighLevelClient; + + WatcherClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Put a watch into the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public PutWatchResponse putWatch(PutWatchRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options, + PutWatchResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously put a watch into the cluster + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void putWatchAsync(PutWatchRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options, + PutWatchResponse::fromXContent, listener, emptySet()); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index a497619b987..4acaadfdb85 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -39,10 +39,17 @@ import static java.util.Collections.emptySet; * X-Pack APIs on elastic.co for more information. 
*/ public final class XPackClient { + private final RestHighLevelClient restHighLevelClient; + private final WatcherClient watcherClient; XPackClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; + this.watcherClient = new WatcherClient(restHighLevelClient); + } + + public WatcherClient watcher() { + return watcherClient; } /** diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java index b45f52f9e44..5f38316fd75 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java @@ -66,13 +66,13 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase { assertEquals(mainResponse.getBuild().shortHash(), info.getBuildInfo().getHash()); - assertEquals("basic", info.getLicenseInfo().getType()); - assertEquals("basic", info.getLicenseInfo().getMode()); + assertEquals("trial", info.getLicenseInfo().getType()); + assertEquals("trial", info.getLicenseInfo().getMode()); assertEquals(LicenseStatus.ACTIVE, info.getLicenseInfo().getStatus()); FeatureSet graph = info.getFeatureSetsInfo().getFeatureSets().get("graph"); assertNotNull(graph.description()); - assertFalse(graph.available()); + assertTrue(graph.available()); assertTrue(graph.enabled()); assertNull(graph.nativeCodeInfo()); FeatureSet monitoring = info.getFeatureSetsInfo().getFeatureSets().get("monitoring"); @@ -82,7 +82,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase { assertNull(monitoring.nativeCodeInfo()); FeatureSet ml = info.getFeatureSetsInfo().getFeatureSets().get("ml"); assertNotNull(ml.description()); - assertFalse(ml.available()); + assertTrue(ml.available()); assertTrue(ml.enabled()); assertEquals(mainResponse.getVersion().toString(), ml.nativeCodeInfo().get("version").toString().replace("-SNAPSHOT", 
"")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index fb4e3b22712..c1f47feb33d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -41,9 +41,9 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; @@ -125,6 +125,7 @@ import org.elasticsearch.index.rankeval.RankEvalSpec; import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.ScriptType; @@ -145,6 +146,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; import org.hamcrest.CoreMatchers; +import 
java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -2523,6 +2525,35 @@ public class RequestConvertersTests extends ESTestCase { assertEquals(expectedParams, request.getParameters()); } + public void testXPackPutWatch() throws Exception { + PutWatchRequest putWatchRequest = new PutWatchRequest(); + String watchId = randomAlphaOfLength(10); + putWatchRequest.setId(watchId); + String body = randomAlphaOfLength(20); + putWatchRequest.setSource(new BytesArray(body), XContentType.JSON); + + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + putWatchRequest.setActive(false); + expectedParams.put("active", "false"); + } + + if (randomBoolean()) { + long version = randomLongBetween(10, 100); + putWatchRequest.setVersion(version); + expectedParams.put("version", String.valueOf(version)); + } + + Request request = RequestConverters.xPackWatcherPutWatch(putWatchRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + request.getEntity().writeTo(bos); + assertThat(bos.toString("UTF-8"), is(body)); + } + /** * Randomize the {@link FetchSourceContext} request parameters. 
*/ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 47870125aa2..5acc6f5552f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -767,7 +767,9 @@ public class RestHighLevelClientTests extends ESTestCase { private static Stream> getSubClientMethods(String namespace, Class clientClass) { return Arrays.stream(clientClass.getMethods()).filter(method -> method.getDeclaringClass().equals(clientClass)) - .map(method -> Tuple.tuple(namespace + "." + toSnakeCase(method.getName()), method)); + .map(method -> Tuple.tuple(namespace + "." + toSnakeCase(method.getName()), method)) + .flatMap(tuple -> tuple.v2().getReturnType().getName().endsWith("Client") + ? getSubClientMethods(tuple.v1(), tuple.v2().getReturnType()) : Stream.of(tuple)); } private static String toSnakeCase(String camelCase) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java new file mode 100644 index 00000000000..dec438a47ab --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; + +import static org.hamcrest.Matchers.is; + +public class WatcherIT extends ESRestHighLevelClientTestCase { + + public void testPutWatch() throws Exception { + String watchId = randomAlphaOfLength(10); + String json = "{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"none\": {} },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"; + BytesReference bytesReference = new BytesArray(json); + PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, bytesReference, XContentType.JSON); + PutWatchResponse putWatchResponse = highLevelClient().xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); + assertThat(putWatchResponse.isCreated(), is(true)); + assertThat(putWatchResponse.getId(), is(watchId)); + assertThat(putWatchResponse.getVersion(), is(1L)); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index a99b991620a..a9fe4aba2f7 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -39,11 +39,13 @@ import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; +import java.time.Instant; import java.util.EnumSet; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; /** @@ -97,8 +99,7 @@ public class MiscellaneousDocumentationIT extends ESRestHighLevelClientTestCase //tag::x-pack-info-response BuildInfo build = response.getBuildInfo(); // <1> LicenseInfo license = response.getLicenseInfo(); // <2> - assertEquals(XPackInfoResponse.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS, - license.getExpiryDate()); // <3> + assertThat(license.getExpiryDate(), is(greaterThan(Instant.now().toEpochMilli()))); // <3> FeatureSetsInfo features = response.getFeatureSetsInfo(); // <4> //end::x-pack-info-response diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java new file mode 100644 index 00000000000..df51d896cda --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testPutWatch() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + //tag::x-pack-put-watch-execute + // you can also use the WatchSourceBuilder from org.elasticsearch.plugin:x-pack-core to create a watch programmatically + BytesReference watch = new BytesArray("{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"); + PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON); + request.setActive(false); // <1> + PutWatchResponse response = client.xpack().watcher().putWatch(request, RequestOptions.DEFAULT); + 
//end::x-pack-put-watch-execute + + //tag::x-pack-put-watch-response + String watchId = response.getId(); // <1> + boolean isCreated = response.isCreated(); // <2> + long version = response.getVersion(); // <3> + //end::x-pack-put-watch-response + } + + { + BytesReference watch = new BytesArray("{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"); + PutWatchRequest request = new PutWatchRequest("my_other_watch_id", watch, XContentType.JSON); + // tag::x-pack-put-watch-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(PutWatchResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-put-watch-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-put-watch-execute-async + client.xpack().watcher().putWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-put-watch-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } +} diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index cf38040e865..d952870677b 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -57,7 +57,8 @@ The Java High Level REST Client supports the following Miscellaneous APIs: include::miscellaneous/main.asciidoc[] include::miscellaneous/ping.asciidoc[] -include::miscellaneous/x-pack-info.asciidoc[] +include::x-pack/x-pack-info.asciidoc[] +include::x-pack/watcher/put-watch.asciidoc[] == Indices APIs diff --git a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc 
b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc new file mode 100644 index 00000000000..c803c54eb5e --- /dev/null +++ b/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc @@ -0,0 +1,55 @@ +[[java-rest-high-x-pack-watcher-put-watch]] +=== X-Pack Info API + +[[java-rest-high-x-pack-watcher-put-watch-execution]] +==== Execution + +General information about the installed {watcher} features can be retrieved +using the `watcher()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute] +-------------------------------------------------- +<1> Allows to store the watch, but to not trigger it. Defaults to `true` + +[[java-rest-high-x-pack-watcher-put-watch-response]] +==== Response + +The returned `XPackPutWatchResponse` contain `created`, `id`, +and `version` information. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-response] +-------------------------------------------------- +<1> `_id` contains id of the watch +<2> `created` is a boolean indicating whether the watch was created for the first time +<3> `_version` returns the newly created version + +[[java-rest-high-x-pack-watcher-put-watch-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute-async] +-------------------------------------------------- +<1> The `XPackPutWatchRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. 
Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `XPackPutWatchResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/WatcherDocumentationIT.java[x-pack-put-watch-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc b/docs/java-rest/high-level/x-pack/x-pack-info.asciidoc similarity index 100% rename from docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc rename to docs/java-rest/high-level/x-pack/x-pack-info.asciidoc diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc b/docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc similarity index 100% rename from docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc rename to docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java index 10c4f0fffc3..063f1f655a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatcherClient.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.core.watcher.client; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import 
org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequestBuilder; @@ -29,9 +31,7 @@ import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchReques import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequestBuilder; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchAction; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequestBuilder; -import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceAction; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.service.WatcherServiceRequestBuilder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java index 56cedc457bd..509116b018e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.watcher.transport.actions.put; import org.elasticsearch.action.Action; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; /** * This action puts an watch into the watch index and adds it to the scheduler 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java index 050ac38dbb0..840ff560fba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchRequestBuilder.java @@ -9,6 +9,8 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; public class PutWatchRequestBuilder extends ActionRequestBuilder { @@ -43,7 +45,7 @@ public class PutWatchRequestBuilder extends ActionRequestBuilder PARSER + = new ObjectParser<>("x_pack_put_watch_response", PutWatchResponse::new); + static { + PARSER.declareString(PutWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created")); + } + + private String id; + private long version; + private boolean created; + + public PutWatchResponse() { + } + + public PutWatchResponse(String id, long version, boolean created) { + this.id = id; + this.version = version; + this.created = created; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setCreated(boolean created) { + this.created = created; + } + + public String getId() { + return id; + } + + public long getVersion() { 
+ return version; + } + + public boolean isCreated() { + return created; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PutWatchResponse that = (PutWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(created, that.created); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, created); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(created); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + created = in.readBoolean(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("created", created) + .endObject(); + } + + public static PutWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java new file mode 100644 index 00000000000..d0aadef1611 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class PutWatchResponseTests extends AbstractXContentTestCase { + + @Override + protected PutWatchResponse createTestInstance() { + String id = randomAlphaOfLength(10); + long version = randomLongBetween(1, 10); + boolean created = randomBoolean(); + return new PutWatchResponse(id, version, created); + } + + @Override + protected PutWatchResponse doParseInstance(XContentParser parser) throws IOException { + return PutWatchResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} From 9aa562d3dd943103e6ae5dbfb2305778e572c76a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 19 Jul 2018 01:53:08 -0700 Subject: [PATCH 099/260] Make x-pack-core generate a pom file This was a forgotten part of #32180 --- x-pack/plugin/core/build.gradle | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 5db149bc677..ca926fa0d54 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -6,6 +6,8 @@ import java.nio.file.Paths import java.nio.file.StandardCopyOption apply plugin: 'elasticsearch.esplugin' +apply plugin: 'nebula.maven-base-publish' +apply plugin: 'nebula.maven-scm' archivesBaseName = 'x-pack-core' From f7da55aa3c878a845795ece9ae21327801a4204c Mon Sep 17 00:00:00 2001 From: Boaz Leskes 
Date: Thu, 19 Jul 2018 10:44:32 +0200 Subject: [PATCH 100/260] Rest test - allow for snapshots to take 0 milliseconds Sometimes we get lucky and things are fast :) --- .../resources/rest-api-spec/test/snapshot.status/10_basic.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml index c9dd85b11de..c35f2419bdc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.status/10_basic.yml @@ -38,7 +38,8 @@ setup: - gt: { snapshots.0.stats.total.file_count: 0 } - gt: { snapshots.0.stats.total.size_in_bytes: 0 } - is_true: snapshots.0.stats.start_time_in_millis - - is_true: snapshots.0.stats.time_in_millis +## fast in memory snapshots can take less than one millisecond to complete. + - gte: { snapshots.0.stats.time_in_millis: 0 } --- "Get missing snapshot status throws an exception": From f6d7854f76216247ed70ecf2dabb4d6edd978fff Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 19 Jul 2018 11:33:46 +0100 Subject: [PATCH 101/260] Remove indication of future multi-homing support (#32187) We do not support intra-cluster connections on multiple interfaces, but the documentation indicates that we will in future. In fact there is currently no plan to support this, so the forward-looking documentation is misleading. 
This commit - removes the misleading sentence - fixes that a transport profile affects outbound connections, not inbound ones - tidies up some nearby text --- docs/reference/modules/transport.asciidoc | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 046d82cc507..257181f70c5 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -59,7 +59,8 @@ It also uses the common [float] ==== TCP Transport Profiles -Elasticsearch allows you to bind to multiple ports on different interfaces by the use of transport profiles. See this example configuration +Elasticsearch allows you to bind to multiple ports on different interfaces by +the use of transport profiles. See this example configuration [source,yaml] -------------- @@ -71,10 +72,12 @@ transport.profiles.dmz.port: 9700-9800 transport.profiles.dmz.bind_host: 172.16.1.2 -------------- -The `default` profile is a special. It is used as fallback for any other profiles, if those do not have a specific configuration setting set. -Note that the default profile is how other nodes in the cluster will connect to this node usually. In the future this feature will allow to enable node-to-node communication via multiple interfaces. +The `default` profile is special. It is used as a fallback for any other +profiles, if those do not have a specific configuration setting set, and is how +this node connects to other nodes in the cluster. 
-The following parameters can be configured like that +The following parameters can be configured on each transport profile, as in the +example above: * `port`: The port to bind to * `bind_host`: The host to bind From 7c0fc209bf78e4824ca1f232b84a1dab22bc2dfa Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Thu, 19 Jul 2018 12:54:38 +0200 Subject: [PATCH 102/260] ECS Task IAM profile credentials ignored in repository-s3 plugin (#31864) ECS Task IAM profile credentials ignored in repository-s3 plugin (#31864) Closes #26913 --- .../gradle/test/ClusterConfiguration.groovy | 7 + .../elasticsearch/gradle/test/NodeInfo.groovy | 1 + docs/plugins/repository-s3.asciidoc | 4 +- plugins/repository-s3/build.gradle | 50 +++- .../repositories/s3/S3Service.java | 7 +- .../repositories/s3/AmazonS3Fixture.java | 16 +- .../50_repository_ecs_credentials.yml | 243 ++++++++++++++++++ 7 files changed, 315 insertions(+), 13 deletions(-) create mode 100644 plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index d6477e05b15..b29bb7a8cd3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -142,6 +142,8 @@ class ClusterConfiguration { // there are cases when value depends on task that is not executed yet on configuration stage Map systemProperties = new HashMap<>() + Map environmentVariables = new HashMap<>() + Map settings = new HashMap<>() Map keystoreSettings = new HashMap<>() @@ -164,6 +166,11 @@ class ClusterConfiguration { systemProperties.put(property, value) } + @Input + void environment(String variable, Object value) { + environmentVariables.put(variable, value) + } + @Input void setting(String name, Object 
value) { settings.put(name, value) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 7844ea77fc1..0dd56b86332 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -181,6 +181,7 @@ class NodeInfo { args.addAll("-E", "node.portsfile=true") env = [:] + env.putAll(config.environmentVariables) for (Map.Entry property : System.properties.entrySet()) { if (property.key.startsWith('tests.es.')) { args.add("-E") diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 0d73e35f18e..19ead367204 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -13,8 +13,8 @@ include::install_remove.asciidoc[] ==== Getting started with AWS The plugin provides a repository type named `s3` which may be used when creating a repository. -The repository defaults to using -http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html[IAM Role] +The repository defaults to using https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html[ECS IAM Role] or +http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html[EC2 IAM Role] credentials for authentication. 
The only mandatory setting is the bucket name: [source,js] diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 225d523817e..181891e2056 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -92,11 +92,15 @@ String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") String s3EC2Bucket = System.getenv("amazon_s3_bucket_ec2") String s3EC2BasePath = System.getenv("amazon_s3_base_path_ec2") +String s3ECSBucket = System.getenv("amazon_s3_bucket_ecs") +String s3ECSBasePath = System.getenv("amazon_s3_base_path_ecs") + // If all these variables are missing then we are testing against the internal fixture instead, which has the following // credentials hard-coded in. if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath - && !s3EC2Bucket && !s3EC2BasePath) { + && !s3EC2Bucket && !s3EC2BasePath + && !s3ECSBucket && !s3ECSBasePath) { s3PermanentAccessKey = 's3_integration_test_permanent_access_key' s3PermanentSecretKey = 's3_integration_test_permanent_secret_key' s3PermanentBucket = 'permanent-bucket-test' @@ -105,10 +109,14 @@ if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3P s3EC2Bucket = 'ec2-bucket-test' s3EC2BasePath = 'integration_test' + s3ECSBucket = 'ecs-bucket-test' + s3ECSBasePath = 'integration_test' + useFixture = true } else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath - || !s3EC2Bucket || !s3EC2BasePath) { + || !s3EC2Bucket || !s3EC2BasePath + || !s3ECSBucket || !s3ECSBasePath) { throw new IllegalArgumentException("not all options specified to run against external S3 service") } @@ -284,7 +292,8 @@ if (useFixture && minioDistribution) { // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 integTestMinioRunner.systemProperty 'tests.rest.blacklist', [ 'repository_s3/30_repository_temporary_credentials/*', - 
'repository_s3/40_repository_ec2_credentials/*' + 'repository_s3/40_repository_ec2_credentials/*', + 'repository_s3/50_repository_ecs_credentials/*' ].join(",") project.check.dependsOn(integTestMinio) @@ -302,7 +311,8 @@ task s3FixtureProperties { "s3Fixture.temporary_bucket_name" : s3TemporaryBucket, "s3Fixture.temporary_key" : s3TemporaryAccessKey, "s3Fixture.temporary_session_token": s3TemporarySessionToken, - "s3Fixture.ec2_bucket_name" : s3EC2Bucket + "s3Fixture.ec2_bucket_name" : s3EC2Bucket, + "s3Fixture.ecs_bucket_name" : s3ECSBucket ] doLast { @@ -327,7 +337,9 @@ Map expansions = [ 'temporary_bucket': s3TemporaryBucket, 'temporary_base_path': s3TemporaryBasePath, 'ec2_bucket': s3EC2Bucket, - 'ec2_base_path': s3EC2BasePath + 'ec2_base_path': s3EC2BasePath, + 'ecs_bucket': s3ECSBucket, + 'ecs_base_path': s3ECSBasePath ] processTestResources { @@ -364,6 +376,34 @@ integTestCluster { } } +integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/50_repository_ecs_credentials/*' + +/// +RestIntegTestTask integTestECS = project.tasks.create('integTestECS', RestIntegTestTask.class) { + description = "Runs tests using the ECS repository." 
+} + +// The following closure must execute before the afterEvaluate block in the constructor of the following integrationTest tasks: +project.afterEvaluate { + ClusterConfiguration cluster = project.extensions.getByName('integTestECSCluster') as ClusterConfiguration + cluster.dependsOn(project.s3Fixture) + + cluster.setting 's3.client.integration_test_ecs.endpoint', "http://${-> s3Fixture.addressAndPort}" + + Task integTestECSTask = project.tasks.getByName('integTestECS') + integTestECSTask.clusterConfig.plugin(project.path) + integTestECSTask.clusterConfig.environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + "http://${-> s3Fixture.addressAndPort}/ecs_credentials_endpoint" + integTestECSRunner.systemProperty 'tests.rest.blacklist', [ + 'repository_s3/10_basic/*', + 'repository_s3/20_repository_permanent_credentials/*', + 'repository_s3/30_repository_temporary_credentials/*', + 'repository_s3/40_repository_ec2_credentials/*' + ].join(",") +} +project.check.dependsOn(integTestECS) +/// + thirdPartyAudit.excludes = [ // classes are missing 'javax.servlet.ServletContextEvent', diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 91a7a30024b..b177686bd71 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -22,7 +22,7 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.InstanceProfileCredentialsProvider; +import com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; @@ -156,10 +156,11 @@ class 
S3Service extends AbstractComponent implements Closeable { } static class PrivilegedInstanceProfileCredentialsProvider implements AWSCredentialsProvider { - private final InstanceProfileCredentialsProvider credentials; + private final AWSCredentialsProvider credentials; private PrivilegedInstanceProfileCredentialsProvider() { - this.credentials = new InstanceProfileCredentialsProvider(); + // InstanceProfileCredentialsProvider as last item of chain + this.credentials = new EC2ContainerCredentialsProviderWrapper(); } @Override diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index ce6c4723149..a411a1c53cf 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -88,7 +88,10 @@ public class AmazonS3Fixture extends AbstractHttpFixture { final Bucket ec2Bucket = new Bucket("s3Fixture.ec2", randomAsciiAlphanumOfLength(random, 10), randomAsciiAlphanumOfLength(random, 10)); - this.handlers = defaultHandlers(buckets, ec2Bucket); + final Bucket ecsBucket = new Bucket("s3Fixture.ecs", + randomAsciiAlphanumOfLength(random, 10), randomAsciiAlphanumOfLength(random, 10)); + + this.handlers = defaultHandlers(buckets, ec2Bucket, ecsBucket); } private static String nonAuthPath(Request request) { @@ -174,7 +177,7 @@ public class AmazonS3Fixture extends AbstractHttpFixture { } /** Builds the default request handlers **/ - private PathTrie defaultHandlers(final Map buckets, final Bucket ec2Bucket) { + private PathTrie defaultHandlers(final Map buckets, final Bucket ec2Bucket, final Bucket ecsBucket) { final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); // HEAD Object @@ -400,11 +403,18 @@ public class AmazonS3Fixture extends AbstractHttpFixture { 
handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/{profileName}"), (request) -> { final String profileName = request.getParam("profileName"); if (EC2_PROFILE.equals(profileName) == false) { - return new Response(RestStatus.NOT_FOUND.getStatus(), new HashMap<>(), "unknown credentials".getBytes(UTF_8)); + return new Response(RestStatus.NOT_FOUND.getStatus(), new HashMap<>(), "unknown profile".getBytes(UTF_8)); } return credentialResponseFunction.apply(profileName, ec2Bucket.key, ec2Bucket.token); }); + // GET + // + // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/ecs_credentials_endpoint"), + (request) -> credentialResponseFunction.apply("CPV_ECS", ecsBucket.key, ecsBucket.token)); + + return handlers; } diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml new file mode 100644 index 00000000000..54929e6e3ad --- /dev/null +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml @@ -0,0 +1,243 @@ +# Integration tests for repository-s3 + +--- +setup: + + # Register repository with ecs credentials + - do: + snapshot.create_repository: + repository: repository_ecs + body: + type: s3 + settings: + bucket: ${ecs_bucket} + client: integration_test_ecs + base_path: ${ecs_base_path} + canned_acl: private + storage_class: standard + +--- +"Snapshot and Restore with repository-s3 using ecs credentials": + + # Get repository + - do: + snapshot.get_repository: + repository: repository_ecs + + - match: { repository_ecs.settings.bucket : ${ecs_bucket} } + - match: { repository_ecs.settings.client : "integration_test_ecs" } + - match: { repository_ecs.settings.base_path : ${ecs_base_path} } + - match: { 
repository_ecs.settings.canned_acl : "private" } + - match: { repository_ecs.settings.storage_class : "standard" } + - is_false: repository_ecs.settings.access_key + - is_false: repository_ecs.settings.secret_key + - is_false: repository_ecs.settings.session_token + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository_ecs + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository_ecs + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository_ecs + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository_ecs + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + 
indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository_ecs + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository_ecs + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository_ecs + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository_ecs + snapshot: snapshot-one + +--- +"Register a repository with a non existing bucket": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ecs + body: + type: s3 + settings: + bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test_temporary + +--- +"Register a repository with a non existing client": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ecs + body: + type: s3 + settings: + bucket: repository_ecs + client: unknown + +--- +"Get a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.get: + repository: repository_ecs + snapshot: missing + +--- +"Delete a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.delete: + repository: repository_ecs + snapshot: missing + +--- +"Restore a non existing snapshot": + + - do: + catch: /snapshot_restore_exception/ + snapshot.restore: + repository: repository_ecs + snapshot: missing + wait_for_completion: true + +--- +teardown: + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository_ecs From 9ae6905657b310ede5d968d71f4f3265eb26db5b Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Thu, 19 Jul 2018 09:17:49 -0700 Subject: [PATCH 103/260] add support for write 
index resolution when creating/updating documents (#31520) Now write operations like Index, Delete, Update rely on the write-index associated with an alias to operate against. This means writes will be accepted even when an alias points to multiple indices, so long as one is the write index. Routing values will be used from the AliasMetaData for the alias in the write-index. All read operations are left untouched. --- .../action/bulk/TransportBulkAction.java | 4 +- .../action/index/IndexRequest.java | 2 +- .../action/update/TransportUpdateAction.java | 2 +- .../metadata/IndexNameExpressionResolver.java | 101 ++++++++---- .../cluster/metadata/MetaData.java | 36 ++++ .../action/bulk/BulkIntegrationIT.java | 41 +++++ .../elasticsearch/aliases/IndexAliasesIT.java | 51 ++++++ .../IndexNameExpressionResolverTests.java | 154 ++++++++++++++++++ .../cluster/metadata/MetaDataTests.java | 81 +++++++++ .../org/elasticsearch/get/GetActionIT.java | 26 ++- .../org/elasticsearch/update/UpdateIT.java | 3 +- .../test/security/authz/12_index_alias.yml | 71 ++++++++ 12 files changed, 534 insertions(+), 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index a6ed8de6530..939b0b70249 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -295,7 +295,7 @@ public class TransportBulkAction extends HandledTransportAction request) { Index concreteIndex = indices.get(request.index()); if (concreteIndex == null) { - concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request); + concreteIndex = indexNameExpressionResolver.concreteWriteIndex(state, request); indices.put(request.index(), concreteIndex); } return concreteIndex; diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java 
b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 51997b32edf..57e8ea66138 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -496,7 +496,7 @@ public class IndexRequest extends ReplicatedWriteRequest implement /* resolve the routing if needed */ public void resolveRouting(MetaData metaData) { - routing(metaData.resolveIndexRouting(routing, index)); + routing(metaData.resolveWriteIndexRouting(routing, index)); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 299a2ce8123..cc682619cbd 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -104,7 +104,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio } public static void resolveAndValidateRouting(MetaData metaData, String concreteIndex, UpdateRequest request) { - request.routing((metaData.resolveIndexRouting(request.routing(), request.index()))); + request.routing((metaData.resolveWriteIndexRouting(request.routing(), request.index()))); // Fail fast on the node that received the request, rather than failing when translating on the index or delete request. 
if (request.routing() == null && metaData.routingRequired(concreteIndex, request.type())) { throw new RoutingMissingException(concreteIndex, request.type(), request.id()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 8fa3c2e0fc1..1f6a9fe027d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -42,7 +42,6 @@ import org.joda.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -103,7 +102,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { return concreteIndexNames(context, indexExpressions); } - /** + /** * Translates the provided index expression into actual concrete indices, properly deduplicated. * * @param state the cluster state containing all the data to resolve to expressions to concrete indices @@ -117,7 +116,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { * indices options in the context don't allow such a case. */ public Index[] concreteIndices(ClusterState state, IndicesOptions options, String... 
indexExpressions) { - Context context = new Context(state, options); + Context context = new Context(state, options, false, false); return concreteIndices(context, indexExpressions); } @@ -193,30 +192,40 @@ public class IndexNameExpressionResolver extends AbstractComponent { } } - Collection resolvedIndices = aliasOrIndex.getIndices(); - if (resolvedIndices.size() > 1 && !options.allowAliasesToMultipleIndices()) { - String[] indexNames = new String[resolvedIndices.size()]; - int i = 0; - for (IndexMetaData indexMetaData : resolvedIndices) { - indexNames[i++] = indexMetaData.getIndex().getName(); + if (aliasOrIndex.isAlias() && context.isResolveToWriteIndex()) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex; + IndexMetaData writeIndex = alias.getWriteIndex(); + if (writeIndex == null) { + throw new IllegalArgumentException("no write index is defined for alias [" + alias.getAliasName() + "]." + + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index"); } - throw new IllegalArgumentException("Alias [" + expression + "] has more than one indices associated with it [" + - Arrays.toString(indexNames) + "], can't execute a single index op"); - } - - for (IndexMetaData index : resolvedIndices) { - if (index.getState() == IndexMetaData.State.CLOSE) { - if (failClosed) { - throw new IndexClosedException(index.getIndex()); - } else { - if (options.forbidClosedIndices() == false) { - concreteIndices.add(index.getIndex()); - } + concreteIndices.add(writeIndex.getIndex()); + } else { + if (aliasOrIndex.getIndices().size() > 1 && !options.allowAliasesToMultipleIndices()) { + String[] indexNames = new String[aliasOrIndex.getIndices().size()]; + int i = 0; + for (IndexMetaData indexMetaData : aliasOrIndex.getIndices()) { + indexNames[i++] = indexMetaData.getIndex().getName(); + } + throw new IllegalArgumentException("Alias [" + expression + "] has more 
than one indices associated with it [" + + Arrays.toString(indexNames) + "], can't execute a single index op"); + } + + for (IndexMetaData index : aliasOrIndex.getIndices()) { + if (index.getState() == IndexMetaData.State.CLOSE) { + if (failClosed) { + throw new IndexClosedException(index.getIndex()); + } else { + if (options.forbidClosedIndices() == false) { + concreteIndices.add(index.getIndex()); + } + } + } else if (index.getState() == IndexMetaData.State.OPEN) { + concreteIndices.add(index.getIndex()); + } else { + throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } - } else if (index.getState() == IndexMetaData.State.OPEN) { - concreteIndices.add(index.getIndex()); - } else { - throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } } } @@ -255,6 +264,28 @@ public class IndexNameExpressionResolver extends AbstractComponent { return indices[0]; } + /** + * Utility method that allows to resolve an index expression to its corresponding single write index. + * + * @param state the cluster state containing all the data to resolve to expression to a concrete index + * @param request The request that defines how the an alias or an index need to be resolved to a concrete index + * and the expression that can be resolved to an alias or an index name. 
+ * @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index + * @return the write index obtained as a result of the index resolution + */ + public Index concreteWriteIndex(ClusterState state, IndicesRequest request) { + if (request.indices() == null || (request.indices() != null && request.indices().length != 1)) { + throw new IllegalArgumentException("indices request must specify a single index expression"); + } + Context context = new Context(state, request.indicesOptions(), false, true); + Index[] indices = concreteIndices(context, request.indices()[0]); + if (indices.length != 1) { + throw new IllegalArgumentException("The index expression [" + request.indices()[0] + + "] and options provided did not point to a single write-index"); + } + return indices[0]; + } + /** * @return whether the specified alias or index exists. If the alias or index contains datemath then that is resolved too. */ @@ -292,7 +323,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { String... expressions) { // expand the aliases wildcard List resolvedExpressions = expressions != null ? 
Arrays.asList(expressions) : Collections.emptyList(); - Context context = new Context(state, IndicesOptions.lenientExpandOpen(), true); + Context context = new Context(state, IndicesOptions.lenientExpandOpen(), true, false); for (ExpressionResolver expressionResolver : expressionResolvers) { resolvedExpressions = expressionResolver.resolve(context, resolvedExpressions); } @@ -512,24 +543,26 @@ public class IndexNameExpressionResolver extends AbstractComponent { private final IndicesOptions options; private final long startTime; private final boolean preserveAliases; + private final boolean resolveToWriteIndex; Context(ClusterState state, IndicesOptions options) { this(state, options, System.currentTimeMillis()); } - Context(ClusterState state, IndicesOptions options, boolean preserveAliases) { - this(state, options, System.currentTimeMillis(), preserveAliases); + Context(ClusterState state, IndicesOptions options, boolean preserveAliases, boolean resolveToWriteIndex) { + this(state, options, System.currentTimeMillis(), preserveAliases, resolveToWriteIndex); } Context(ClusterState state, IndicesOptions options, long startTime) { - this(state, options, startTime, false); + this(state, options, startTime, false, false); } - Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) { + Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases, boolean resolveToWriteIndex) { this.state = state; this.options = options; this.startTime = startTime; this.preserveAliases = preserveAliases; + this.resolveToWriteIndex = resolveToWriteIndex; } public ClusterState getState() { @@ -552,6 +585,14 @@ public class IndexNameExpressionResolver extends AbstractComponent { boolean isPreserveAliases() { return preserveAliases; } + + /** + * This is used to require that aliases resolve to their write-index. It is currently not used in conjunction + * with preserveAliases. 
+ */ + boolean isResolveToWriteIndex() { + return resolveToWriteIndex; + } } private interface ExpressionResolver { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 4ed2adc9a1c..c0243888683 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -471,6 +471,42 @@ public class MetaData implements Iterable, Diffable, To return allClosedIndices; } + /** + * Returns indexing routing for the given aliasOrIndex. Resolves routing from the alias metadata used + * in the write index. + */ + public String resolveWriteIndexRouting(@Nullable String routing, String aliasOrIndex) { + if (aliasOrIndex == null) { + return routing; + } + + AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex); + if (result == null || result.isAlias() == false) { + return routing; + } + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result; + IndexMetaData writeIndex = alias.getWriteIndex(); + if (writeIndex == null) { + throw new IllegalArgumentException("alias [" + aliasOrIndex + "] does not have a write index"); + } + AliasMetaData aliasMd = writeIndex.getAliases().get(alias.getAliasName()); + if (aliasMd.indexRouting() != null) { + if (aliasMd.indexRouting().indexOf(',') != -1) { + throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + + aliasMd.getIndexRouting() + "] that resolved to several routing values, rejecting operation"); + } + if (routing != null) { + if (!routing.equals(aliasMd.indexRouting())) { + throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation"); + } + } + // Alias routing overrides the parent routing (if any). 
+ return aliasMd.indexRouting(); + } + return routing; + } + /** * Returns indexing routing for the given index. */ diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index 8fcc76e018a..1fd912e72a4 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -20,13 +20,20 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Map; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.equalTo; public class BulkIntegrationIT extends ESIntegTestCase { public void testBulkIndexCreatesMapping() throws Exception { @@ -40,4 +47,38 @@ public class BulkIntegrationIT extends ESIntegTestCase { assertTrue(mappingsResponse.getMappings().get("logstash-2014.03.30").containsKey("logs")); }); } + + /** + * This tests that the {@link TransportBulkAction} evaluates alias routing values correctly when dealing with + * an alias pointing to multiple indices, while a write index exits. 
+ */ + public void testBulkWithWriteIndexAndRouting() { + Map twoShardsSettings = Collections.singletonMap(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2); + client().admin().indices().prepareCreate("index1") + .addAlias(new Alias("alias1").indexRouting("0")).setSettings(twoShardsSettings).get(); + client().admin().indices().prepareCreate("index2") + .addAlias(new Alias("alias1").indexRouting("0").writeIndex(randomFrom(false, null))) + .setSettings(twoShardsSettings).get(); + client().admin().indices().prepareCreate("index3") + .addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).setSettings(twoShardsSettings).get(); + + IndexRequest indexRequestWithAlias = new IndexRequest("alias1", "type", "id"); + if (randomBoolean()) { + indexRequestWithAlias.routing("1"); + } + indexRequestWithAlias.source(Collections.singletonMap("foo", "baz")); + BulkResponse bulkResponse = client().prepareBulk().add(indexRequestWithAlias).get(); + assertThat(bulkResponse.getItems()[0].getResponse().getIndex(), equalTo("index3")); + assertThat(bulkResponse.getItems()[0].getResponse().getShardId().getId(), equalTo(0)); + assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[0].getResponse().status(), equalTo(RestStatus.CREATED)); + assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("baz")); + + bulkResponse = client().prepareBulk().add(client().prepareUpdate("alias1", "type", "id").setDoc("foo", "updated")).get(); + assertFalse(bulkResponse.hasFailures()); + assertThat(client().prepareGet("index3", "type", "id").setRouting("1").get().getSource().get("foo"), equalTo("updated")); + bulkResponse = client().prepareBulk().add(client().prepareDelete("alias1", "type", "id")).get(); + assertFalse(bulkResponse.hasFailures()); + assertFalse(client().prepareGet("index3", "type", "id").setRouting("1").get().isExists()); + } } diff --git 
a/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index d72b4c5f1ec..e8c152abdc2 100644 --- a/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasA import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -57,6 +58,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static org.elasticsearch.client.Requests.createIndexRequest; +import static org.elasticsearch.client.Requests.deleteRequest; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_METADATA_BLOCK; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_READ_ONLY_BLOCK; @@ -85,6 +87,17 @@ public class IndexAliasesIT extends ESIntegTestCase { ensureGreen(); + logger.info("--> aliasing index [test] with [alias1]"); + assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", false)); + + logger.info("--> indexing against [alias1], should fail now"); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), + XContentType.JSON)).actionGet()); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [alias1]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + logger.info("--> aliasing index [test] with [alias1]"); assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1")); @@ -98,6 +111,44 @@ public class IndexAliasesIT extends ESIntegTestCase { ensureGreen(); + logger.info("--> add index [test_x] with [alias1]"); + assertAcked(admin().indices().prepareAliases().addAlias("test_x", "alias1")); + + logger.info("--> indexing against [alias1], should fail now"); + exception = expectThrows(IllegalArgumentException.class, + () -> client().index(indexRequest("alias1").type("type1").id("1").source(source("2", "test"), + XContentType.JSON)).actionGet()); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [alias1]." + + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + + logger.info("--> deleting against [alias1], should fail now"); + exception = expectThrows(IllegalArgumentException.class, + () -> client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet()); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [alias1]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + + logger.info("--> remove aliasing index [test_x] with [alias1]"); + assertAcked(admin().indices().prepareAliases().removeAlias("test_x", "alias1")); + + logger.info("--> indexing against [alias1], should work now"); + indexResponse = client().index(indexRequest("alias1").type("type1").id("1") + .source(source("1", "test"), XContentType.JSON)).actionGet(); + assertThat(indexResponse.getIndex(), equalTo("test")); + + logger.info("--> add index [test_x] with [alias1] as write-index"); + assertAcked(admin().indices().prepareAliases().addAlias("test_x", "alias1", true)); + + logger.info("--> indexing against [alias1], should work now"); + indexResponse = client().index(indexRequest("alias1").type("type1").id("1") + .source(source("1", "test"), XContentType.JSON)).actionGet(); + assertThat(indexResponse.getIndex(), equalTo("test_x")); + + logger.info("--> deleting against [alias1], should fail now"); + DeleteResponse deleteResponse = client().delete(deleteRequest("alias1").type("type1").id("1")).actionGet(); + assertThat(deleteResponse.getIndex(), equalTo("test_x")); + logger.info("--> remove [alias1], Aliasing index [test_x] with [alias1]"); assertAcked(admin().indices().prepareAliases().removeAlias("test", "alias1").addAlias("test_x", "alias1")); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 0530bd617af..9ad9603b148 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -20,14 +20,20 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; +import 
org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData.State; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.InvalidIndexNameException; @@ -37,6 +43,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.function.Function; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.arrayContaining; @@ -44,6 +51,7 @@ import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -996,6 +1004,152 @@ public class IndexNameExpressionResolverTests extends ESTestCase { assertArrayEquals(new String[] {"test-alias-0", "test-alias-1", "test-alias-non-filtering"}, strings); } + public void testConcreteWriteIndexSuccessful() { + boolean testZeroWriteIndex = randomBoolean(); + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + 
.putAlias(AliasMetaData.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + IndicesRequest request = new IndicesRequest() { + + @Override + public String[] indices() { + return new String[] { "test-alias" }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + }; + Index writeIndex = indexNameExpressionResolver.concreteWriteIndex(state, request); + assertThat(writeIndex.getName(), equalTo("test-0")); + + state = ClusterState.builder(state).metaData(MetaData.builder(state.metaData()) + .put(indexBuilder("test-1").putAlias(AliasMetaData.builder("test-alias") + .writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true)))).build(); + writeIndex = indexNameExpressionResolver.concreteWriteIndex(state, request); + assertThat(writeIndex.getName(), equalTo(testZeroWriteIndex ? 
"test-0" : "test-1")); + } + + public void testConcreteWriteIndexWithInvalidIndicesRequest() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias"))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + Function requestGen = (indices) -> new IndicesRequest() { + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + }; + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, requestGen.apply(null))); + assertThat(exception.getMessage(), equalTo("indices request must specify a single index expression")); + exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, requestGen.apply(new String[] {"too", "many"}))); + assertThat(exception.getMessage(), equalTo("indices request must specify a single index expression")); + + + } + + public void testConcreteWriteIndexWithWildcardExpansion() { + boolean testZeroWriteIndex = randomBoolean(); + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-1").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))) + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(testZeroWriteIndex ? 
randomFrom(false, null) : true))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + IndicesRequest request = new IndicesRequest() { + + @Override + public String[] indices() { + return new String[] { "test-*"}; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictExpandOpenAndForbidClosed(); + } + }; + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, request)); + assertThat(exception.getMessage(), + equalTo("The index expression [test-*] and options provided did not point to a single write-index")); + } + + public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(false))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), + new UpdateRequest("test-alias", "_type", "_id"), new DeleteRequest("test-alias")); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, request)); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [test-alias]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + } + + public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(false, null)))) + .put(indexBuilder("test-1").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(false, null)))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), + new UpdateRequest("test-alias", "_type", "_id"), new DeleteRequest("test-alias")); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteWriteIndex(state, request)); + assertThat(exception.getMessage(), equalTo("no write index is defined for alias [test-alias]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index")); + } + + public void testAliasResolutionNotAllowingMultipleIndices() { + boolean test0WriteIndex = randomBoolean(); + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("test-0").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(test0WriteIndex, null)))) + .put(indexBuilder("test-1").state(State.OPEN) + .putAlias(AliasMetaData.builder("test-alias").writeIndex(randomFrom(!test0WriteIndex, null)))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + String[] strings = indexNameExpressionResolver + .indexAliases(state, "test-0", x -> true, true, "test-*"); + Arrays.sort(strings); + assertArrayEquals(new String[] {"test-alias"}, strings); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> indexNameExpressionResolver.concreteIndexNames(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed(), + "test-alias")); + assertThat(exception.getMessage(), endsWith(", can't execute a single index op")); + } + public void testDeleteIndexIgnoresAliases() { MetaData.Builder mdBuilder = MetaData.builder() .put(indexBuilder("test-index").state(State.OPEN) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 32dd4324ff8..38e3fcc6ea7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -172,6 +172,87 @@ public class MetaDataTests extends ESTestCase { } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting 
operation")); } + + IndexMetaData.Builder builder2 = IndexMetaData.builder("index2") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias0").build()); + MetaData metaDataTwoIndices = MetaData.builder(metaData).put(builder2).build(); + + // alias with multiple indices + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> metaDataTwoIndices.resolveIndexRouting("1", "alias0")); + assertThat(exception.getMessage(), startsWith("Alias [alias0] has more than one index associated with it")); + } + + public void testResolveWriteIndexRouting() { + AliasMetaData.Builder aliasZeroBuilder = AliasMetaData.builder("alias0"); + if (randomBoolean()) { + aliasZeroBuilder.writeIndex(true); + } + IndexMetaData.Builder builder = IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(aliasZeroBuilder.build()) + .putAlias(AliasMetaData.builder("alias1").routing("1").build()) + .putAlias(AliasMetaData.builder("alias2").routing("1,2").build()) + .putAlias(AliasMetaData.builder("alias3").writeIndex(false).build()) + .putAlias(AliasMetaData.builder("alias4").routing("1,2").writeIndex(true).build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // no alias, no index + assertEquals(metaData.resolveWriteIndexRouting(null, null), null); + assertEquals(metaData.resolveWriteIndexRouting("0", null), "0"); + + // index, no alias + assertEquals(metaData.resolveWriteIndexRouting(null, "index"), null); + assertEquals(metaData.resolveWriteIndexRouting("0", "index"), "0"); + + // alias with no index routing + assertEquals(metaData.resolveWriteIndexRouting(null, "alias0"), null); + assertEquals(metaData.resolveWriteIndexRouting("0", "alias0"), "0"); + + // alias with index routing. 
+ assertEquals(metaData.resolveWriteIndexRouting(null, "alias1"), "1"); + Exception exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting("0", "alias1")); + assertThat(exception.getMessage(), + is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")); + + // alias with invalid index routing. + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting(null, "alias2")); + assertThat(exception.getMessage(), + is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting("1", "alias2")); + assertThat(exception.getMessage(), + is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting(randomFrom("1", null), "alias4")); + assertThat(exception.getMessage(), + is("index/alias [alias4] provided with routing value [1,2] that resolved to several routing values, rejecting operation")); + + // alias with no write index + exception = expectThrows(IllegalArgumentException.class, () -> metaData.resolveWriteIndexRouting("1", "alias3")); + assertThat(exception.getMessage(), + is("alias [alias3] does not have a write index")); + + + // aliases with multiple indices + AliasMetaData.Builder aliasZeroBuilderTwo = AliasMetaData.builder("alias0"); + if (randomBoolean()) { + aliasZeroBuilder.writeIndex(false); + } + IndexMetaData.Builder builder2 = IndexMetaData.builder("index2") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(aliasZeroBuilderTwo.build()) + 
.putAlias(AliasMetaData.builder("alias1").routing("0").writeIndex(true).build()) + .putAlias(AliasMetaData.builder("alias2").writeIndex(true).build()); + MetaData metaDataTwoIndices = MetaData.builder(metaData).put(builder2).build(); + + // verify that new write index is used + assertThat("0", equalTo(metaDataTwoIndices.resolveWriteIndexRouting("0", "alias1"))); } public void testUnknownFieldClusterMetaData() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/get/GetActionIT.java b/server/src/test/java/org/elasticsearch/get/GetActionIT.java index 30f86241cbd..5ed6b957c78 100644 --- a/server/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -39,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -51,6 +53,7 @@ import java.util.Set; import static java.util.Collections.singleton; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static 
org.hamcrest.Matchers.instanceOf; @@ -70,7 +73,7 @@ public class GetActionIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type1", "field1", "type=keyword,store=true", "field2", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addAlias(new Alias("alias"))); + .addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null)))); ensureGreen(); GetResponse response = client().prepareGet(indexOrAlias(), "type1", "1").get(); @@ -192,12 +195,31 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.isExists(), equalTo(false)); } + public void testGetWithAliasPointingToMultipleIndices() { + client().admin().indices().prepareCreate("index1") + .addAlias(new Alias("alias1").indexRouting("0")).get(); + if (randomBoolean()) { + client().admin().indices().prepareCreate("index2") + .addAlias(new Alias("alias1").indexRouting("0").writeIndex(randomFrom(false, null))).get(); + } else { + client().admin().indices().prepareCreate("index3") + .addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).get(); + } + IndexResponse indexResponse = client().prepareIndex("index1", "type", "id") + .setSource(Collections.singletonMap("foo", "bar")).get(); + assertThat(indexResponse.status().getStatus(), equalTo(RestStatus.CREATED.getStatus())); + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> + client().prepareGet("alias1", "type", "_alias_id").get()); + assertThat(exception.getMessage(), endsWith("can't execute a single index op")); + } + private static String indexOrAlias() { return randomBoolean() ? 
"test" : "alias"; } public void testSimpleMultiGet() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")) + assertAcked(prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null))) .addMapping("type1", "field", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index c86dfcb98f7..e4ea078b8f7 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -140,8 +140,7 @@ public class UpdateIT extends ESIntegTestCase { private void createTestIndex() throws Exception { logger.info("--> creating index test"); - - assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); + assertAcked(prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, null)))); } public void testUpsert() throws Exception { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml index 44d91d691e1..1e947c5639d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/security/authz/12_index_alias.yml @@ -310,3 +310,74 @@ teardown: index: write_index_2 body: { "query": { "terms": { "_id": [ "19" ] } } } - match: { hits.total: 1 } + +--- +"Test bulk indexing into an alias when resolved to write index": + - do: + indices.update_aliases: + body: + actions: + - add: + index: write_index_2 + alias: can_write_2 + is_write_index: true + - add: + index: write_index_2 + alias: can_read_2 + is_write_index: true + - add: + index: write_index_1 + alias: can_write_3 + is_write_index: true + - add: + index: write_index_2 + alias: 
can_write_3 + is_write_index: false + + - do: + headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user + bulk: + refresh: true + body: + - '{"index": {"_index": "can_read_1", "_type": "doc", "_id": "20"}}' + - '{"name": "doc20"}' + - '{"index": {"_index": "can_write_1", "_type": "doc", "_id": "21"}}' + - '{"name": "doc21"}' + - '{"index": {"_index": "can_read_2", "_type": "doc", "_id": "22"}}' + - '{"name": "doc22"}' + - '{"index": {"_index": "can_write_2", "_type": "doc", "_id": "23"}}' + - '{"name": "doc23"}' + - '{"index": {"_index": "can_write_3", "_type": "doc", "_id": "24"}}' + - '{"name": "doc24"}' + - '{"update": {"_index": "can_write_3", "_type": "doc", "_id": "24"}}' + - '{"doc": { "name": "doc_24"}}' + - '{"delete": {"_index": "can_write_3", "_type": "doc", "_id": "24"}}' + - match: { errors: true } + - match: { items.0.index.status: 403 } + - match: { items.0.index.error.type: "security_exception" } + - match: { items.1.index.status: 201 } + - match: { items.2.index.status: 403 } + - match: { items.2.index.error.type: "security_exception" } + - match: { items.3.index.status: 403 } + - match: { items.3.index.error.type: "security_exception" } + - match: { items.4.index.status: 201 } + - match: { items.5.update.status: 200 } + - match: { items.6.delete.status: 200 } + + - do: # superuser + search: + index: write_index_1 + body: { "query": { "terms": { "_id": [ "21" ] } } } + - match: { hits.total: 1 } + + - do: + indices.delete_alias: + index: "write_index_2" + name: [ "can_write_2", "can_read_2" ] + ignore: 404 + + - do: + indices.delete_alias: + index: "write_index_1" + name: [ "can_write_3" ] + ignore: 404 From 4c68dfe0016673bbdb1885384d1934858180d521 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 19 Jul 2018 17:41:06 -0400 Subject: [PATCH 104/260] Handle missing values in painless (#32207) Throw an exception for doc['field'].value if this document is missing a value for the field. 
After deprecation changes have been backported to 6.x, make this a default behaviour in 7.0 Closes #29286 --- .../elasticsearch/gradle/BuildPlugin.groovy | 1 - .../painless-getting-started.asciidoc | 17 +- server/build.gradle | 10 - .../index/fielddata/ScriptDocValues.java | 44 +--- .../elasticsearch/script/ScriptModule.java | 13 +- ...criptDocValuesMissingV6BehaviourTests.java | 195 ------------------ 6 files changed, 14 insertions(+), 266 deletions(-) delete mode 100644 server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 219d00ba640..c5dd19de3cc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -750,7 +750,6 @@ class BuildPlugin implements Plugin { systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' systemProperty 'jna.nosys', 'true' - systemProperty 'es.scripting.exception_for_missing_value', 'true' // TODO: remove setting logging level via system property systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { diff --git a/docs/painless/painless-getting-started.asciidoc b/docs/painless/painless-getting-started.asciidoc index 887769e49ab..1dec4a33bb5 100644 --- a/docs/painless/painless-getting-started.asciidoc +++ b/docs/painless/painless-getting-started.asciidoc @@ -123,21 +123,8 @@ GET hockey/_search [float] ===== Missing values -If you request the value from a field `field` that isn’t in -the document, `doc['field'].value` for this document returns: - -- `0` if a `field` has a numeric datatype (long, double etc.) 
-- `false` is a `field` has a boolean datatype -- epoch date if a `field` has a date datatype -- `null` if a `field` has a string datatype -- `null` if a `field` has a geo datatype -- `""` if a `field` has a binary datatype - -IMPORTANT: Starting in 7.0, `doc['field'].value` throws an exception if -the field is missing in a document. To enable this behavior now, -set a {ref}/jvm-options.html[`jvm.option`] -`-Des.scripting.exception_for_missing_value=true` on a node. If you do not enable -this behavior, a deprecation warning is logged on start up. +`doc['field'].value` throws an exception if +the field is missing in a document. To check if a document is missing a value, you can call `doc['field'].size() == 0`. diff --git a/server/build.gradle b/server/build.gradle index 7db073f43a5..c71cc4c7dbd 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -156,16 +156,6 @@ if (isEclipse) { compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" -// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 -additionalTest('testScriptDocValuesMissingV6Behaviour'){ - include '**/ScriptDocValuesMissingV6BehaviourTests.class' - systemProperty 'es.scripting.exception_for_missing_value', 'false' -} -test { - // these are tested explicitly in separate test tasks - exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' -} - forbiddenPatterns { exclude '**/*.json' exclude '**/*.jmx' diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index 6d888bd63e3..fedad6e134b 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.geo.GeoPoint; import 
org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.script.ScriptModule; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; @@ -126,11 +125,8 @@ public abstract class ScriptDocValues extends AbstractList { public long getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return 0L; + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); } return values[0]; } @@ -172,11 +168,8 @@ public abstract class ScriptDocValues extends AbstractList { */ public ReadableDateTime getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return EPOCH; + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); } return get(0); } @@ -277,11 +270,8 @@ public abstract class ScriptDocValues extends AbstractList { public double getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return 0d; + throw new IllegalStateException("A document doesn't have a value for a field! 
" + + "Use doc[].size()==0 to check if a document is missing a field!"); } return values[0]; } @@ -337,11 +327,8 @@ public abstract class ScriptDocValues extends AbstractList { public GeoPoint getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + + throw new IllegalStateException("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!"); - } - return null; } return values[0]; } @@ -454,11 +441,8 @@ public abstract class ScriptDocValues extends AbstractList { public boolean getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + - "Use doc[].size()==0 to check if a document is missing a field!"); - } - return false; + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); } return values[0]; } @@ -544,11 +528,8 @@ public abstract class ScriptDocValues extends AbstractList { public String getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + + throw new IllegalStateException("A document doesn't have a value for a field! " + "Use doc[].size()==0 to check if a document is missing a field!"); - } - return null; } return get(0); } @@ -572,11 +553,8 @@ public abstract class ScriptDocValues extends AbstractList { public BytesRef getValue() { if (count == 0) { - if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { - throw new IllegalStateException("A document doesn't have a value for a field! " + + throw new IllegalStateException("A document doesn't have a value for a field! 
" + "Use doc[].size()==0 to check if a document is missing a field!"); - } - return new BytesRef(); } return get(0); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index bf4bd9c57ce..a3da1dafe48 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -31,9 +31,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; + /** * Manages building {@link ScriptService}. @@ -64,11 +62,6 @@ public class ScriptModule { ).collect(Collectors.toMap(c -> c.name, Function.identity())); } - public static final boolean EXCEPTION_FOR_MISSING_VALUE = - Booleans.parseBoolean(System.getProperty("es.scripting.exception_for_missing_value", "false")); - - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ScriptModule.class)); - private final ScriptService scriptService; public ScriptModule(Settings settings, List scriptPlugins) { @@ -92,10 +85,6 @@ public class ScriptModule { } } } - if (EXCEPTION_FOR_MISSING_VALUE == false) - DEPRECATION_LOGGER.deprecated("Script: returning default values for missing document values is deprecated. 
" + - "Set system property '-Des.scripting.exception_for_missing_value=true' " + - "to make behaviour compatible with future major versions."); scriptService = new ScriptService(settings, Collections.unmodifiableMap(engines), Collections.unmodifiableMap(contexts)); } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java deleted file mode 100644 index 1dc836874d8..00000000000 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata; - -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.fielddata.ScriptDocValues.Longs; -import org.elasticsearch.index.fielddata.ScriptDocValues.Dates; -import org.elasticsearch.index.fielddata.ScriptDocValues.Booleans; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.MockScriptEngine; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.test.ESTestCase; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.ReadableDateTime; -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; - -import static java.util.Collections.singletonList; - -public class ScriptDocValuesMissingV6BehaviourTests extends ESTestCase { - - public void testScriptMissingValuesWarning(){ - new ScriptModule(Settings.EMPTY, singletonList(new ScriptPlugin() { - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1")); - } - })); - assertWarnings("Script: returning default values for missing document values is deprecated. 
" + - "Set system property '-Des.scripting.exception_for_missing_value=true' " + - "to make behaviour compatible with future major versions."); - } - - public void testZeroForMissingValueLong() throws IOException { - long[][] values = new long[between(3, 10)][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[0]; - } - Longs longs = wrap(values); - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - longs.setNextDocId(d); - assertEquals(0, longs.getValue()); - } - } - - public void testEpochForMissingValueDate() throws IOException { - final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); - long[][] values = new long[between(3, 10)][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[0]; - } - Dates dates = wrapDates(values); - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - dates.setNextDocId(d); - assertEquals(EPOCH, dates.getValue()); - } - } - - public void testFalseForMissingValueBoolean() throws IOException { - long[][] values = new long[between(3, 10)][]; - for (int d = 0; d < values.length; d++) { - values[d] = new long[0]; - } - Booleans bools = wrapBooleans(values); - for (int round = 0; round < 10; round++) { - int d = between(0, values.length - 1); - bools.setNextDocId(d); - assertEquals(false, bools.getValue()); - } - } - - public void testNullForMissingValueGeo() throws IOException{ - final MultiGeoPointValues values = wrap(new GeoPoint[0]); - final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values); - script.setNextDocId(0); - assertEquals(null, script.getValue()); - } - - - private Longs wrap(long[][] values) { - return new Longs(new AbstractSortedNumericDocValues() { - long[] current; - int i; - @Override - public boolean advanceExact(int doc) { - i = 0; - current = values[doc]; - return current.length > 0; - } - @Override - public int docValueCount() { - return current.length; - } - @Override - public long 
nextValue() { - return current[i++]; - } - }); - } - - private Booleans wrapBooleans(long[][] values) { - return new Booleans(new AbstractSortedNumericDocValues() { - long[] current; - int i; - @Override - public boolean advanceExact(int doc) { - i = 0; - current = values[doc]; - return current.length > 0; - } - @Override - public int docValueCount() { - return current.length; - } - @Override - public long nextValue() { - return current[i++]; - } - }); - } - - private Dates wrapDates(long[][] values) { - return new Dates(new AbstractSortedNumericDocValues() { - long[] current; - int i; - @Override - public boolean advanceExact(int doc) { - current = values[doc]; - i = 0; - return current.length > 0; - } - @Override - public int docValueCount() { - return current.length; - } - @Override - public long nextValue() { - return current[i++]; - } - }); - } - - - private static MultiGeoPointValues wrap(final GeoPoint... points) { - return new MultiGeoPointValues() { - int docID = -1; - int i; - @Override - public GeoPoint nextValue() { - if (docID != 0) { - fail(); - } - return points[i++]; - } - @Override - public boolean advanceExact(int docId) { - docID = docId; - return points.length > 0; - } - @Override - public int docValueCount() { - if (docID != 0) { - return 0; - } - return points.length; - } - }; - } - -} From c7a41c501ac0c8ef6a6cd96c2abc7c4fbd7fb76e Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 19 Jul 2018 16:35:03 -0700 Subject: [PATCH 105/260] Painless: Simplify Naming in Lookup Package (#32177) This removes some extraneous naming syntax and makes clear the meaning of certain naming conventions without ambiguities (stricter) within the lookup package. Purely mechanical change. Note this does not cover a large portion of the PainlessLookupBuilder and PainlessLookup yet as there are several more follow up PRs for these incoming. 
--- .../painless/AnalyzerCaster.java | 4 +- .../java/org/elasticsearch/painless/Def.java | 6 +- .../elasticsearch/painless/FunctionRef.java | 2 +- .../org/elasticsearch/painless/Locals.java | 2 +- .../painless/ScriptClassInfo.java | 2 +- .../painless/lookup/PainlessLookup.java | 2 +- .../lookup/PainlessLookupBuilder.java | 248 ++++------ .../lookup/PainlessLookupUtility.java | 466 ++++++++++-------- .../painless/lookup/PainlessMethod.java | 14 +- .../painless/node/AExpression.java | 2 +- .../elasticsearch/painless/node/EBinary.java | 44 +- .../painless/node/ECapturingFunctionRef.java | 8 +- .../elasticsearch/painless/node/ECast.java | 2 +- .../elasticsearch/painless/node/EComp.java | 32 +- .../painless/node/EFunctionRef.java | 4 +- .../painless/node/EInstanceof.java | 6 +- .../elasticsearch/painless/node/ELambda.java | 10 +- .../elasticsearch/painless/node/ENull.java | 2 +- .../elasticsearch/painless/node/EUnary.java | 6 +- .../elasticsearch/painless/node/PBrace.java | 2 +- .../painless/node/PCallInvoke.java | 2 +- .../elasticsearch/painless/node/PField.java | 7 +- .../painless/node/PSubField.java | 2 +- .../elasticsearch/painless/node/SEach.java | 2 +- .../painless/node/SFunction.java | 4 +- .../painless/node/SSubEachArray.java | 2 +- .../painless/node/SSubEachIterable.java | 4 +- .../painless/PainlessDocGenerator.java | 4 +- 28 files changed, 464 insertions(+), 427 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 6cfc7ff6ebf..fe53a3c1100 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -466,8 +466,8 @@ public final class AnalyzerCaster { return PainlessCast.standard(actual, expected, explicit); } else { throw location.createError(new ClassCastException("Cannot cast from " + - 
"[" + PainlessLookupUtility.anyTypeToPainlessTypeName(actual) + "] to " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] to " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index f3388fc4bb2..dad8da06e76 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -302,7 +302,7 @@ public final class Def { nestedType, 0, DefBootstrap.REFERENCE, - PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceType)); + PainlessLookupUtility.typeToCanonicalTypeName(interfaceType)); filter = nested.dynamicInvoker(); } else { throw new AssertionError(); @@ -334,7 +334,7 @@ public final class Def { int arity = interfaceMethod.arguments.size(); PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, - PainlessLookupUtility.anyTypeToPainlessTypeName(implMethod.target), implMethod.name, receiverClass); + PainlessLookupUtility.typeToCanonicalTypeName(implMethod.target), implMethod.name, receiverClass); } /** Returns a method handle to an implementation of clazz, given method reference signature. 
*/ @@ -347,7 +347,7 @@ public final class Def { PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(clazz).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(clazz) + "], not a functional interface"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface"); } int arity = interfaceMethod.arguments.size() + captures.length; final MethodHandle handle; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index d64e833912f..aa72724b930 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -168,7 +168,7 @@ public class FunctionRef { PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; if (method == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); } // lookup requested method diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index 6c1010a3450..804f6aa2b68 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -292,7 +292,7 @@ public final class Locals { @Override public String toString() { StringBuilder b = new StringBuilder(); - 
b.append("Variable[type=").append(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz)); + b.append("Variable[type=").append(PainlessLookupUtility.typeToCanonicalTypeName(clazz)); b.append(",name=").append(name); b.append(",slot=").append(slot); if (readonly) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index ff2061a9a4b..6d4b4552696 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -183,7 +183,7 @@ public class ScriptClassInfo { private static Class definitionTypeForClass(PainlessLookup painlessLookup, Class type, Function, String> unknownErrorMessageSource) { - type = PainlessLookupUtility.javaObjectTypeToPainlessDefType(type); + type = PainlessLookupUtility.javaTypeToType(type); Class componentType = type; while (componentType.isArray()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 6111d12317b..752c0c205dd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -54,6 +54,6 @@ public final class PainlessLookup { } public Class getJavaClassFromPainlessType(String painlessType) { - return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); + return PainlessLookupUtility.canonicalTypeNameToType(painlessType, painlessTypesToJavaClasses); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java 
index ecf15c7ad2c..06773d3ffdd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -30,7 +30,6 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -39,22 +38,21 @@ import java.util.Objects; import java.util.Stack; import java.util.regex.Pattern; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_PAINLESS_CLASS_NAME; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.anyTypeNameToPainlessTypeName; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_TYPE_NAME; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; public class PainlessLookupBuilder { private static class PainlessMethodCacheKey { - private final Class javaClass; + private final Class targetType; private final String methodName; - private final List> painlessTypeParameters; + private final List> typeParameters; - private PainlessMethodCacheKey(Class javaClass, String methodName, List> painlessTypeParameters) { - this.javaClass = javaClass; + private PainlessMethodCacheKey(Class targetType, String methodName, List> typeParameters) { + this.targetType = targetType; this.methodName = methodName; - this.painlessTypeParameters = Collections.unmodifiableList(painlessTypeParameters); + this.typeParameters = Collections.unmodifiableList(typeParameters); } @Override @@ -69,27 +67,27 @@ public class PainlessLookupBuilder { PainlessMethodCacheKey that = (PainlessMethodCacheKey)object; - return Objects.equals(javaClass, that.javaClass) && + return Objects.equals(targetType, that.targetType) && Objects.equals(methodName, that.methodName) && - 
Objects.equals(painlessTypeParameters, that.painlessTypeParameters); + Objects.equals(typeParameters, that.typeParameters); } @Override public int hashCode() { - return Objects.hash(javaClass, methodName, painlessTypeParameters); + return Objects.hash(targetType, methodName, typeParameters); } } private static class PainlessFieldCacheKey { - private final Class javaClass; + private final Class targetType; private final String fieldName; - private final Class painlessType; + private final Class typeParameter; - private PainlessFieldCacheKey(Class javaClass, String fieldName, Class painlessType) { - this.javaClass = javaClass; + private PainlessFieldCacheKey(Class targetType, String fieldName, Class typeParameter) { + this.targetType = targetType; this.fieldName = fieldName; - this.painlessType = painlessType; + this.typeParameter = typeParameter; } @Override @@ -104,14 +102,14 @@ public class PainlessLookupBuilder { PainlessFieldCacheKey that = (PainlessFieldCacheKey) object; - return Objects.equals(javaClass, that.javaClass) && - Objects.equals(fieldName, that.fieldName) && - Objects.equals(painlessType, that.painlessType); + return Objects.equals(targetType, that.targetType) && + Objects.equals(fieldName, that.fieldName) && + Objects.equals(typeParameter, that.typeParameter); } @Override public int hashCode() { - return Objects.hash(javaClass, fieldName, painlessType); + return Objects.hash(targetType, fieldName, typeParameter); } } @@ -122,157 +120,115 @@ public class PainlessLookupBuilder { private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); private static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); - private static String anyTypesArrayToCanonicalString(Class[] anyTypesArray, boolean toPainlessTypes) { - return anyTypesListToCanonicalString(Arrays.asList(anyTypesArray), toPainlessTypes); - } - - private static String anyTypesListToCanonicalString(List> anyTypesList, boolean 
toPainlessTypes) { - StringBuilder anyTypesCanonicalStringBuilder = new StringBuilder("["); - - int anyTypesSize = anyTypesList.size(); - int anyTypesIndex = 0; - - for (Class anyType : anyTypesList) { - String anyTypeCanonicalName = anyType.getCanonicalName(); - - if (toPainlessTypes) { - anyTypeCanonicalName = anyTypeNameToPainlessTypeName(anyTypeCanonicalName); - } - - anyTypesCanonicalStringBuilder.append(anyTypeCanonicalName); - - if (++anyTypesIndex < anyTypesSize) { - anyTypesCanonicalStringBuilder.append(","); - } - } - - anyTypesCanonicalStringBuilder.append("]"); - - return anyTypesCanonicalStringBuilder.toString(); - } - private final List whitelists; - private final Map> painlessClassNamesToJavaClasses; - private final Map, PainlessClassBuilder> javaClassesToPainlessClassBuilders; + private final Map> canonicalClassNamesToClasses; + private final Map, PainlessClassBuilder> classesToPainlessClasses; public PainlessLookupBuilder(List whitelists) { this.whitelists = whitelists; - painlessClassNamesToJavaClasses = new HashMap<>(); - javaClassesToPainlessClassBuilders = new HashMap<>(); + canonicalClassNamesToClasses = new HashMap<>(); + classesToPainlessClasses = new HashMap<>(); - painlessClassNamesToJavaClasses.put(DEF_PAINLESS_CLASS_NAME, def.class); - javaClassesToPainlessClassBuilders.put(def.class, - new PainlessClassBuilder(DEF_PAINLESS_CLASS_NAME, Object.class, Type.getType(Object.class))); + canonicalClassNamesToClasses.put(DEF_TYPE_NAME, def.class); + classesToPainlessClasses.put(def.class, + new PainlessClassBuilder(DEF_TYPE_NAME, Object.class, Type.getType(Object.class))); } - private Class painlessTypeNameToPainlessType(String painlessTypeName) { - return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessTypeName, painlessClassNamesToJavaClasses); + private Class canonicalTypeNameToType(String canonicalTypeName) { + return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses); } - private void 
validatePainlessType(Class painlessType) { - PainlessLookupUtility.validatePainlessType(painlessType, javaClassesToPainlessClassBuilders.keySet()); + private void validateType(Class type) { + PainlessLookupUtility.validateType(type, classesToPainlessClasses.keySet()); } - public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importPainlessClassName) { + public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { Objects.requireNonNull(classLoader); Objects.requireNonNull(javaClassName); - String painlessClassName = anyTypeNameToPainlessTypeName(javaClassName); + Class clazz; - if (CLASS_NAME_PATTERN.matcher(painlessClassName).matches() == false) { - throw new IllegalArgumentException("invalid painless class name [" + painlessClassName + "]"); - } - - String importedPainlessClassName = anyTypeNameToPainlessTypeName(javaClassName.substring(javaClassName.lastIndexOf('.') + 1)); - - Class javaClass; - - if ("void".equals(javaClassName)) javaClass = void.class; - else if ("boolean".equals(javaClassName)) javaClass = boolean.class; - else if ("byte".equals(javaClassName)) javaClass = byte.class; - else if ("short".equals(javaClassName)) javaClass = short.class; - else if ("char".equals(javaClassName)) javaClass = char.class; - else if ("int".equals(javaClassName)) javaClass = int.class; - else if ("long".equals(javaClassName)) javaClass = long.class; - else if ("float".equals(javaClassName)) javaClass = float.class; - else if ("double".equals(javaClassName)) javaClass = double.class; + if ("void".equals(javaClassName)) clazz = void.class; + else if ("boolean".equals(javaClassName)) clazz = boolean.class; + else if ("byte".equals(javaClassName)) clazz = byte.class; + else if ("short".equals(javaClassName)) clazz = short.class; + else if ("char".equals(javaClassName)) clazz = char.class; + else if ("int".equals(javaClassName)) clazz = int.class; + else if ("long".equals(javaClassName)) clazz = 
long.class; + else if ("float".equals(javaClassName)) clazz = float.class; + else if ("double".equals(javaClassName)) clazz = double.class; else { try { - javaClass = Class.forName(javaClassName, true, classLoader); - - if (javaClass == def.class) { - throw new IllegalArgumentException("cannot add reserved painless class [" + DEF_PAINLESS_CLASS_NAME + "]"); - } - - if (javaClass.isArray()) { - throw new IllegalArgumentException("cannot add an array type java class [" + javaClassName + "] as a painless class"); - } + clazz = Class.forName(javaClassName, true, classLoader); } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("java class [" + javaClassName + "] not found", cnfe); + throw new IllegalArgumentException("class [" + javaClassName + "] not found", cnfe); } } - addPainlessClass(painlessClassName, importedPainlessClassName, javaClass, importPainlessClassName); + addPainlessClass(clazz, importClassName); } - public void addPainlessClass(Class javaClass, boolean importPainlessClassName) { - Objects.requireNonNull(javaClass); + public void addPainlessClass(Class clazz, boolean importClassName) { + Objects.requireNonNull(clazz); - if (javaClass == def.class) { - throw new IllegalArgumentException("cannot specify reserved painless class [" + DEF_PAINLESS_CLASS_NAME + "]"); + if (clazz == def.class) { + throw new IllegalArgumentException("cannot add reserved class [" + DEF_TYPE_NAME + "]"); } - String javaClassName = javaClass.getCanonicalName(); - String painlessClassName = anyTypeNameToPainlessTypeName(javaClassName); - String importedPainlessClassName = anyTypeNameToPainlessTypeName(javaClassName.substring(javaClassName.lastIndexOf('.') + 1)); + String canonicalClassName = clazz.getCanonicalName(); - addPainlessClass(painlessClassName, importedPainlessClassName, javaClass, importPainlessClassName); - } + if (clazz.isArray()) { + throw new IllegalArgumentException("cannot add array type [" + canonicalClassName + "] as a class"); + } - 
private void addPainlessClass( - String painlessClassName, String importedPainlessClassName, Class javaClass, boolean importPainlessClassName) { - PainlessClassBuilder existingPainlessClassBuilder = javaClassesToPainlessClassBuilders.get(javaClass); + if (CLASS_NAME_PATTERN.matcher(canonicalClassName).matches() == false) { + throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); + } + + PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClasses.get(clazz); if (existingPainlessClassBuilder == null) { - PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(painlessClassName, javaClass, Type.getType(javaClass)); - painlessClassNamesToJavaClasses.put(painlessClassName, javaClass); - javaClassesToPainlessClassBuilders.put(javaClass, painlessClassBuilder); - } else if (existingPainlessClassBuilder.clazz.equals(javaClass) == false) { - throw new IllegalArgumentException("painless class [" + painlessClassName + "] illegally represents multiple java classes " + - "[" + javaClass.getCanonicalName() + "] and [" + existingPainlessClassBuilder.clazz.getCanonicalName() + "]"); + PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(canonicalClassName, clazz, Type.getType(clazz)); + + canonicalClassNamesToClasses.put(canonicalClassName, clazz); + classesToPainlessClasses.put(clazz, painlessClassBuilder); + } else if (existingPainlessClassBuilder.clazz.equals(clazz) == false) { + throw new IllegalArgumentException("class [" + canonicalClassName + "] " + + "cannot represent multiple java classes with the same name from different class loaders"); } - if (painlessClassName.equals(importedPainlessClassName)) { - if (importPainlessClassName == true) { - throw new IllegalArgumentException( - "must use only_fqn parameter on painless class [" + painlessClassName + "] with no package"); + String javaClassName = clazz.getName(); + String importedCanonicalClassName = 
javaClassName.substring(javaClassName.lastIndexOf('.') + 1).replace('$', '.'); + + if (canonicalClassName.equals(importedCanonicalClassName)) { + if (importClassName == true) { + throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); } } else { - Class importedJavaClass = painlessClassNamesToJavaClasses.get(importedPainlessClassName); + Class importedPainlessType = canonicalClassNamesToClasses.get(importedCanonicalClassName); - if (importedJavaClass == null) { - if (importPainlessClassName) { + if (importedPainlessType == null) { + if (importClassName) { if (existingPainlessClassBuilder != null) { throw new IllegalArgumentException( - "inconsistent only_fqn parameters found for painless class [" + painlessClassName + "]"); + "inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); } - painlessClassNamesToJavaClasses.put(importedPainlessClassName, javaClass); + canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedJavaClass.equals(javaClass) == false) { - throw new IllegalArgumentException("painless class [" + importedPainlessClassName + "] illegally represents multiple " + - "java classes [" + javaClass.getCanonicalName() + "] and [" + importedJavaClass.getCanonicalName() + "]"); - } else if (importPainlessClassName == false) { - throw new IllegalArgumentException( - "inconsistent only_fqn parameters found for painless class [" + painlessClassName + "]"); + } else if (importedPainlessType.equals(clazz) == false) { + throw new IllegalArgumentException("painless type [" + importedCanonicalClassName + "] illegally represents multiple " + + "java types [" + clazz.getCanonicalName() + "] and [" + importedPainlessType.getCanonicalName() + "]"); + } else if (importClassName == false) { + throw new IllegalArgumentException("inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); } } } private void 
addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -286,10 +242,10 @@ public class PainlessLookupBuilder { String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = painlessTypeNameToPainlessType(painlessParameterTypeName); + Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); + javaClassParameters[parameterCount] = PainlessLookupUtility.typeToJavaType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and constructor parameters " + @@ -333,7 +289,7 @@ public class PainlessLookupBuilder { } private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -372,11 +328,11 @@ public class PainlessLookupBuilder { String painlessParameterTypeName = 
whitelistMethod.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = painlessTypeNameToPainlessType(painlessParameterTypeName); + Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); javaClassParameters[parameterCount + augmentedOffset] = - PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); + PainlessLookupUtility.typeToJavaType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + @@ -398,14 +354,14 @@ public class PainlessLookupBuilder { Class painlessReturnClass; try { - painlessReturnClass = painlessTypeNameToPainlessType(whitelistMethod.painlessReturnTypeName); + painlessReturnClass = canonicalTypeNameToType(whitelistMethod.painlessReturnTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); } - if (javaMethod.getReturnType() != PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessReturnClass)) { + if (javaMethod.getReturnType() != PainlessLookupUtility.typeToJavaType(painlessReturnClass)) { throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " + "does not match the return type class [" + javaMethod.getReturnType() + "] for the " + "method with name [" + whitelistMethod.javaMethodName + "] " + @@ -471,7 +427,7 @@ public class PainlessLookupBuilder { } private void addField(String ownerStructName, WhitelistField whitelistField) { - 
PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -495,7 +451,7 @@ public class PainlessLookupBuilder { Class painlessFieldClass; try { - painlessFieldClass = painlessTypeNameToPainlessType(whitelistField.painlessFieldTypeName); + painlessFieldClass = canonicalTypeNameToType(whitelistField.painlessFieldTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " + "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae); @@ -552,7 +508,7 @@ public class PainlessLookupBuilder { } private void copyStruct(String struct, List children) { - final PainlessClassBuilder owner = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(struct)); + final PainlessClassBuilder owner = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); @@ -560,7 +516,7 @@ public class PainlessLookupBuilder { for (int count = 0; count < children.size(); ++count) { final PainlessClassBuilder child = - javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(children.get(count))); + classesToPainlessClasses.get(canonicalClassNamesToClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -734,7 +690,7 @@ public class PainlessLookupBuilder { for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = 
whitelistStruct.javaClassName.replace('$', '.'); PainlessClassBuilder painlessStruct = - javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(painlessTypeName)); + classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + @@ -745,8 +701,8 @@ public class PainlessLookupBuilder { addPainlessClass( whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false); - painlessStruct = javaClassesToPainlessClassBuilders.get(painlessClassNamesToJavaClasses.get(painlessTypeName)); - javaClassesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); + painlessStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); + classesToPainlessClasses.put(painlessStruct.clazz, painlessStruct); } } @@ -779,8 +735,8 @@ public class PainlessLookupBuilder { // goes through each Painless struct and determines the inheritance list, // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : javaClassesToPainlessClassBuilders.keySet()) { - PainlessClassBuilder painlessStruct = javaClassesToPainlessClassBuilders.get(javaClass); + for (Class javaClass : classesToPainlessClasses.keySet()) { + PainlessClassBuilder painlessStruct = classesToPainlessClasses.get(javaClass); List painlessSuperStructs = new ArrayList<>(); Class javaSuperClass = painlessStruct.clazz.getSuperclass(); @@ -791,7 +747,7 @@ public class PainlessLookupBuilder { // adds super classes to the inheritance list if (javaSuperClass != null && javaSuperClass.isInterface() == false) { while (javaSuperClass != null) { - PainlessClassBuilder painlessSuperStruct = javaClassesToPainlessClassBuilders.get(javaSuperClass); + PainlessClassBuilder 
painlessSuperStruct = classesToPainlessClasses.get(javaSuperClass); if (painlessSuperStruct != null) { painlessSuperStructs.add(painlessSuperStruct.name); @@ -807,7 +763,7 @@ public class PainlessLookupBuilder { Class javaInterfaceLookup = javaInteraceLookups.pop(); for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClassBuilder painlessInterfaceStruct = javaClassesToPainlessClassBuilders.get(javaSuperInterface); + PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClasses.get(javaSuperInterface); if (painlessInterfaceStruct != null) { String painlessInterfaceStructName = painlessInterfaceStruct.name; @@ -828,7 +784,7 @@ public class PainlessLookupBuilder { // copies methods and fields from Object into interface types if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - PainlessClassBuilder painlessObjectStruct = javaClassesToPainlessClassBuilders.get(Object.class); + PainlessClassBuilder painlessObjectStruct = classesToPainlessClasses.get(Object.class); if (painlessObjectStruct != null) { copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); @@ -837,18 +793,18 @@ public class PainlessLookupBuilder { } // precompute runtime classes - for (PainlessClassBuilder painlessStruct : javaClassesToPainlessClassBuilders.values()) { + for (PainlessClassBuilder painlessStruct : classesToPainlessClasses.values()) { addRuntimeClass(painlessStruct); } Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,PainlessClassBuilder> entry : javaClassesToPainlessClassBuilders.entrySet()) { + for (Map.Entry,PainlessClassBuilder> entry : classesToPainlessClasses.entrySet()) { entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue()); javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); } - return new 
PainlessLookup(painlessClassNamesToJavaClasses, javaClassesToPainlessClasses); + return new PainlessLookup(canonicalClassNamesToClasses, javaClassesToPainlessClasses); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 0f7c8fb915c..1f698b7c673 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -19,35 +19,177 @@ package org.elasticsearch.painless.lookup; -import org.objectweb.asm.Type; - import java.util.Arrays; import java.util.Collection; +import java.util.List; import java.util.Map; +import java.util.Objects; /** - * This class contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within + * PainlessLookupUtility contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within * Painless for conversion between type names and types along with some other various utility methods. 
* * The following terminology is used for variable names throughout the lookup package: * - * - javaClass (Class) - a java class including def and excluding array type java classes - * - javaClassName (String) - the fully qualified java class name for a javaClass - * - painlessClassName (String) - the fully qualified painless name or imported painless name for a painlessClass - * - anyClassName (String) - either a javaClassName or a painlessClassName - * - javaType (Class) - a java class excluding def and array type java classes - * - painlessType (Class) - a java class including def and array type java classes - * - javaTypeName (String) - the fully qualified java Type name for a javaType - * - painlessTypeName (String) - the fully qualified painless name or imported painless name for a painlessType - * - anyTypeName (String) - either a javaTypeName or a painlessTypeName - * - painlessClass (PainlessClass) - a painless class object + * A class is a set of methods and fields under a specific class name. A type is either a class or an array under a specific type name. + * Note the distinction between class versus type is class means that no array classes will be be represented whereas type allows array + * classes to be represented. The set of available classes will always be a subset of the available types. * - * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. - * If the variable name is the same for asm, java, and painless, no prefix is used. + * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. If the variable value is the same for asm, + * java, and painless, no prefix is used. + * + *
    + *
  • - javaClassName (String) - the fully qualified java class name where '$' tokens represent inner classes excluding + * def and array types
  • + * + *
  • - javaClass (Class) - a java class excluding def and array types
  • + * + *
  • - javaType (Class) - a java class excluding def and including array types
  • + * + *
  • - importedClassName (String) - the imported painless class name where the java canonical class name is used without + * the package qualifier + * + *
  • - canonicalClassName (String) - the fully qualified painless class name equivalent to the fully + * qualified java canonical class name or imported painless class name for a class + * including def and excluding array types where '.' tokens represent inner classes
  • + * + *
  • - canonicalTypeName (String) - the fully qualified painless type name equivalent to the fully + * qualified java canonical type name or imported painless type name for a type + * including def where '.' tokens represent inner classes and each set of '[]' tokens + * at the end of the type name represent a single dimension for an array type
  • + * + *
  • - class/clazz (Class) - a painless class represented by a java class including def and excluding array + * types
  • + * + *
  • - type (Class) - a painless type represented by a java class including def and array types
  • + * + *
  • - painlessClass (PainlessClass) - a painless class object
  • + * + *
  • - painlessMethod (PainlessMethod) - a painless method object
  • + * + *
  • - painlessField (PainlessField) - a painless field object
  • + *
*/ public final class PainlessLookupUtility { - public static Class javaObjectTypeToPainlessDefType(Class javaType) { + /** + * Converts a canonical type name to a type based on the terminology specified as part of the documentation for + * {@link PainlessLookupUtility}. Since canonical class names are a subset of canonical type names, this method will + * safely convert a canonical class name to a class as well. + */ + public static Class canonicalTypeNameToType(String canonicalTypeName, Map> canonicalClassNamesToClasses) { + Objects.requireNonNull(canonicalTypeName); + Objects.requireNonNull(canonicalClassNamesToClasses); + + Class type = canonicalClassNamesToClasses.get(canonicalTypeName); + + if (type != null) { + return type; + } + + int arrayDimensions = 0; + int arrayIndex = canonicalTypeName.indexOf('['); + + if (arrayIndex != -1) { + int typeNameLength = canonicalTypeName.length(); + + while (arrayIndex < typeNameLength) { + if (canonicalTypeName.charAt(arrayIndex) == '[' && + ++arrayIndex < typeNameLength && + canonicalTypeName.charAt(arrayIndex++) == ']') { + ++arrayDimensions; + } else { + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); + } + } + + canonicalTypeName = canonicalTypeName.substring(0, canonicalTypeName.indexOf('[')); + type = canonicalClassNamesToClasses.get(canonicalTypeName); + + char arrayBraces[] = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); + String javaTypeName = new String(arrayBraces); + + if (type == boolean.class) { + javaTypeName += "Z"; + } else if (type == byte.class) { + javaTypeName += "B"; + } else if (type == short.class) { + javaTypeName += "S"; + } else if (type == char.class) { + javaTypeName += "C"; + } else if (type == int.class) { + javaTypeName += "I"; + } else if (type == long.class) { + javaTypeName += "J"; + } else if (type == float.class) { + javaTypeName += "F"; + } else if (type == double.class) { + javaTypeName += "D"; + } else { + javaTypeName += "L" + 
type.getName() + ";"; + } + + try { + return Class.forName(javaTypeName); + } catch (ClassNotFoundException cnfe) { + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found", cnfe); + } + } + + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); + } + + /** + * Converts a type to a canonical type name based on the terminology specified as part of the documentation for + * {@link PainlessLookupUtility}. Since classes are a subset of types, this method will safely convert a class + * to a canonical class name as well. + */ + public static String typeToCanonicalTypeName(Class type) { + Objects.requireNonNull(type); + + String canonicalTypeName = type.getCanonicalName(); + + if (canonicalTypeName.startsWith(def.class.getName())) { + canonicalTypeName = canonicalTypeName.replace(def.class.getName(), DEF_TYPE_NAME); + } + + return canonicalTypeName; + } + + /** + * Converts a list of types to a list of canonical type names as a string based on the terminology specified as part of the + * documentation for {@link PainlessLookupUtility}. Since classes are a subset of types, this method will safely convert a list + * of classes or a mixed list of classes and types to a list of canonical type names as a string as well. 
+ */ + public static String typesToCanonicalTypeNames(List> types) { + StringBuilder typesStringBuilder = new StringBuilder("["); + + int anyTypesSize = types.size(); + int anyTypesIndex = 0; + + for (Class painlessType : types) { + String canonicalTypeName = typeToCanonicalTypeName(painlessType); + + typesStringBuilder.append(canonicalTypeName); + + if (++anyTypesIndex < anyTypesSize) { + typesStringBuilder.append(","); + } + } + + typesStringBuilder.append("]"); + + return typesStringBuilder.toString(); + } + /** + * Converts a java type to a type based on the terminology specified as part of {@link PainlessLookupUtility} where if a type is an + * object class or object array, the returned type will be the equivalent def class or def array. Otherwise, this behaves as an + * identity function. + */ + public static Class javaTypeToType(Class javaType) { + Objects.requireNonNull(javaType); + if (javaType.isArray()) { Class javaTypeComponent = javaType.getComponentType(); int arrayDimensions = 1; @@ -58,14 +200,11 @@ public final class PainlessLookupUtility { } if (javaTypeComponent == Object.class) { - char[] asmDescriptorBraces = new char[arrayDimensions]; - Arrays.fill(asmDescriptorBraces, '['); - - String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(def.class).getDescriptor(); - Type asmType = Type.getType(asmDescriptor); + char[] arrayBraces = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); try { - return Class.forName(asmType.getInternalName().replace('/', '.')); + return Class.forName(new String(arrayBraces) + "L" + def.class.getName() + ";"); } catch (ClassNotFoundException cnfe) { throw new IllegalStateException("internal error", cnfe); } @@ -77,206 +216,147 @@ public final class PainlessLookupUtility { return javaType; } - public static Class painlessDefTypeToJavaObjectType(Class painlessType) { - if (painlessType.isArray()) { - Class painlessTypeComponent = painlessType.getComponentType(); + /** + * Converts a type to a java 
type based on the terminology specified as part of {@link PainlessLookupUtility} where if a type is a + * def class or def array, the returned type will be the equivalent object class or object array. Otherwise, this behaves as an + * identity function. + */ + public static Class typeToJavaType(Class type) { + Objects.requireNonNull(type); + + if (type.isArray()) { + Class typeComponent = type.getComponentType(); int arrayDimensions = 1; - while (painlessTypeComponent.isArray()) { - painlessTypeComponent = painlessTypeComponent.getComponentType(); + while (typeComponent.isArray()) { + typeComponent = typeComponent.getComponentType(); ++arrayDimensions; } - if (painlessTypeComponent == def.class) { - char[] asmDescriptorBraces = new char[arrayDimensions]; - Arrays.fill(asmDescriptorBraces, '['); - - String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(Object.class).getDescriptor(); - Type asmType = Type.getType(asmDescriptor); + if (typeComponent == def.class) { + char[] arrayBraces = new char[arrayDimensions]; + Arrays.fill(arrayBraces, '['); try { - return Class.forName(asmType.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalStateException("internal error", exception); + return Class.forName(new String(arrayBraces) + "L" + Object.class.getName() + ";"); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("internal error", cnfe); } } - } else if (painlessType == def.class) { + } else if (type == def.class) { return Object.class; } - return painlessType; + return type; } - public static String anyTypeNameToPainlessTypeName(String anyTypeName) { - return anyTypeName.replace(def.class.getName(), DEF_PAINLESS_CLASS_NAME).replace('$', '.'); - } + /** + * Ensures a type exists based on the terminology specified as part of {@link PainlessLookupUtility}. Throws an + * {@link IllegalArgumentException} if the type does not exist. 
+ */ + public static void validateType(Class type, Collection> classes) { + String canonicalTypeName = typeToCanonicalTypeName(type); - public static String anyTypeToPainlessTypeName(Class anyType) { - if (anyType.isLocalClass() || anyType.isAnonymousClass()) { - return null; - } else if (anyType.isArray()) { - Class anyTypeComponent = anyType.getComponentType(); - int arrayDimensions = 1; - - while (anyTypeComponent.isArray()) { - anyTypeComponent = anyTypeComponent.getComponentType(); - ++arrayDimensions; - } - - if (anyTypeComponent == def.class) { - StringBuilder painlessDefTypeNameArrayBuilder = new StringBuilder(DEF_PAINLESS_CLASS_NAME); - - for (int dimension = 0; dimension < arrayDimensions; dimension++) { - painlessDefTypeNameArrayBuilder.append("[]"); - } - - return painlessDefTypeNameArrayBuilder.toString(); - } - } else if (anyType == def.class) { - return DEF_PAINLESS_CLASS_NAME; + while (type.getComponentType() != null) { + type = type.getComponentType(); } - return anyType.getCanonicalName().replace('$', '.'); - } - - public static Class painlessTypeNameToPainlessType(String painlessTypeName, Map> painlessClassNamesToJavaClasses) { - Class javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName); - - if (javaClass != null) { - return javaClass; - } - - int arrayDimensions = 0; - int arrayIndex = painlessTypeName.indexOf('['); - - if (arrayIndex != -1) { - int painlessTypeNameLength = painlessTypeName.length(); - - while (arrayIndex < painlessTypeNameLength) { - if (painlessTypeName.charAt(arrayIndex) == '[' && - ++arrayIndex < painlessTypeNameLength && - painlessTypeName.charAt(arrayIndex++) == ']') { - ++arrayDimensions; - } else { - throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); - } - } - - painlessTypeName = painlessTypeName.substring(0, painlessTypeName.indexOf('[')); - javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName); - - char javaDescriptorBraces[] = new char[arrayDimensions]; 
- Arrays.fill(javaDescriptorBraces, '['); - String javaDescriptor = new String(javaDescriptorBraces); - - if (javaClass == boolean.class) { - javaDescriptor += "Z"; - } else if (javaClass == byte.class) { - javaDescriptor += "B"; - } else if (javaClass == short.class) { - javaDescriptor += "S"; - } else if (javaClass == char.class) { - javaDescriptor += "C"; - } else if (javaClass == int.class) { - javaDescriptor += "I"; - } else if (javaClass == long.class) { - javaDescriptor += "J"; - } else if (javaClass == float.class) { - javaDescriptor += "F"; - } else if (javaClass == double.class) { - javaDescriptor += "D"; - } else { - javaDescriptor += "L" + javaClass.getName() + ";"; - } - - try { - return Class.forName(javaDescriptor); - } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found", cnfe); - } - } - - throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); - } - - public static void validatePainlessType(Class painlessType, Collection> javaClasses) { - String painlessTypeName = anyTypeNameToPainlessTypeName(painlessType.getName()); - - while (painlessType.getComponentType() != null) { - painlessType = painlessType.getComponentType(); - } - - if (javaClasses.contains(painlessType) == false) { - throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); + if (classes.contains(type) == false) { + throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); } } + /** + * Converts a type to its boxed type equivalent if one exists based on the terminology specified as part of + * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function. 
+ */ + public static Class typeToBoxedType(Class type) { + if (type == boolean.class) { + return Boolean.class; + } else if (type == byte.class) { + return Byte.class; + } else if (type == short.class) { + return Short.class; + } else if (type == char.class) { + return Character.class; + } else if (type == int.class) { + return Integer.class; + } else if (type == long.class) { + return Long.class; + } else if (type == float.class) { + return Float.class; + } else if (type == double.class) { + return Double.class; + } + + return type; + } + + /** + * Converts a type to its unboxed type equivalent if one exists based on the terminology specified as part of + * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function. + */ + public static Class typeToUnboxedType(Class type) { + if (type == Boolean.class) { + return boolean.class; + } else if (type == Byte.class) { + return byte.class; + } else if (type == Short.class) { + return short.class; + } else if (type == Character.class) { + return char.class; + } else if (type == Integer.class) { + return int.class; + } else if (type == Long.class) { + return long.class; + } else if (type == Float.class) { + return float.class; + } else if (type == Double.class) { + return double.class; + } + + return type; + } + + /** + * Checks if a type based on the terminology specified as part of {@link PainlessLookupUtility} is available as a constant type + * where {@code true} is returned if the type is a constant type and {@code false} otherwise. + */ + public static boolean isConstantType(Class type) { + return type == boolean.class || + type == byte.class || + type == short.class || + type == char.class || + type == int.class || + type == long.class || + type == float.class || + type == double.class || + type == String.class; + } + + /** + * Constructs a painless method key used to lookup painless methods from a painless class. 
+ */ public static String buildPainlessMethodKey(String methodName, int methodArity) { return methodName + "/" + methodArity; } + /** + * Constructs a painless field key used to lookup painless fields from a painless class. + */ public static String buildPainlessFieldKey(String fieldName) { return fieldName; } - public static Class getBoxedAnyType(Class anyType) { - if (anyType == boolean.class) { - return Boolean.class; - } else if (anyType == byte.class) { - return Byte.class; - } else if (anyType == short.class) { - return Short.class; - } else if (anyType == char.class) { - return Character.class; - } else if (anyType == int.class) { - return Integer.class; - } else if (anyType == long.class) { - return Long.class; - } else if (anyType == float.class) { - return Float.class; - } else if (anyType == double.class) { - return Double.class; - } + /** + * The def type name as specified in the source for a script. + */ + public static final String DEF_TYPE_NAME = "def"; - return anyType; - } - - public static Class getUnboxedAnyType(Class anyType) { - if (anyType == Boolean.class) { - return boolean.class; - } else if (anyType == Byte.class) { - return byte.class; - } else if (anyType == Short.class) { - return short.class; - } else if (anyType == Character.class) { - return char.class; - } else if (anyType == Integer.class) { - return int.class; - } else if (anyType == Long.class) { - return long.class; - } else if (anyType == Float.class) { - return float.class; - } else if (anyType == Double.class) { - return double.class; - } - - return anyType; - } - - public static boolean isAnyTypeConstant(Class anyType) { - return anyType == boolean.class || - anyType == byte.class || - anyType == short.class || - anyType == char.class || - anyType == int.class || - anyType == long.class || - anyType == float.class || - anyType == double.class || - anyType == String.class; - } - - public static final String DEF_PAINLESS_CLASS_NAME = def.class.getSimpleName(); - public static 
final String CONSTRUCTOR_ANY_NAME = ""; + /** + * The method name for all constructors. + */ + public static final String CONSTRUCTOR_NAME = ""; private PainlessLookupUtility() { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java index 2b0d44e7176..3321de94a26 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -70,21 +70,21 @@ public class PainlessMethod { params = new Class[1 + arguments.size()]; params[0] = augmentation; for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i + 1] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); + returnValue = PainlessLookupUtility.typeToJavaType(rtn); } else if (Modifier.isStatic(modifiers)) { // static method: straightforward copy params = new Class[arguments.size()]; for (int i = 0; i < arguments.size(); i++) { - params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); + returnValue = PainlessLookupUtility.typeToJavaType(rtn); } else if ("".equals(name)) { // constructor: returns the owner class params = new Class[arguments.size()]; for (int i = 0; i < arguments.size(); i++) { - params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } returnValue = target; } else { @@ -92,9 +92,9 @@ public class PainlessMethod { params = new Class[1 + arguments.size()]; params[0] = target; for (int i = 0; i 
< arguments.size(); i++) { - params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); + params[i + 1] = PainlessLookupUtility.typeToJavaType(arguments.get(i)); } - returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn); + returnValue = PainlessLookupUtility.typeToJavaType(rtn); } return MethodType.methodType(returnValue, params); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index dd813f73c3d..ddf289564b1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -157,7 +157,7 @@ public abstract class AExpression extends ANode { return ecast; } else { - if (PainlessLookupUtility.isAnyTypeConstant(expected)) { + if (PainlessLookupUtility.isConstantType(expected)) { // For the case where a cast is required, a constant is set, // and the constant can be immediately cast to the expected type. 
// An EConstant replaces this node with the constant cast appropriately diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 65776ca76f1..00abe788bf4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -106,8 +106,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply multiply [*] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -149,8 +149,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply divide [/] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -197,8 +197,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply remainder [%] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; 
@@ -245,8 +245,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply add [+] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -304,8 +304,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply subtract [-] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -363,8 +363,8 @@ public final class EBinary extends AExpression { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply left shift [<<] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -411,8 +411,8 @@ public final class EBinary extends AExpression { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply right shift [>>] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + 
PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -462,8 +462,8 @@ public final class EBinary extends AExpression { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply unsigned shift [>>>] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (lhspromote == def.class || rhspromote == def.class) { @@ -506,8 +506,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply and [&] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -546,8 +546,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply xor [^] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; @@ -587,8 +587,8 @@ public final class EBinary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply or [|] to types " + - "[" + 
PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } actual = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index e8ad9d85ed6..7b35bc1b48e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -69,7 +69,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda defPointer = "D" + variable + "." + call + ",1"; } else { // typed implementation - defPointer = "S" + PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz) + "." + call + ",1"; + defPointer = "S" + PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz) + "." 
+ call + ",1"; } actual = String.class; } else { @@ -77,8 +77,8 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef( - locals.getPainlessLookup(), expected, PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz), call, 1); + ref = new FunctionRef(locals.getPainlessLookup(), expected, + PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { @@ -110,7 +110,7 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // typed interface, dynamic implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); Type methodType = Type.getMethodType(MethodWriter.getType(expected), MethodWriter.getType(captured.clazz)); - writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookupUtility.anyTypeToPainlessTypeName(expected)); + writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookupUtility.typeToCanonicalTypeName(expected)); } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index b0451b685b5..b07613714b8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -63,6 +63,6 @@ final class ECast extends AExpression { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(cast.to), child); + return 
singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(cast.to), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index edf18f501bc..4d8a71ae3eb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -93,8 +93,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply equals [==] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -143,8 +143,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference equals [===] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -184,8 +184,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply not equals [!=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + 
PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -234,8 +234,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -275,8 +275,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -315,8 +315,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than [>] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -355,8 +355,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - 
"[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -395,8 +395,8 @@ public final class EComp extends AExpression { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than [>=] to types " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); } if (promotedType == def.class) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 92b14a885a1..d787db5d41c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -68,13 +68,13 @@ public final class EFunctionRef extends AExpression implements ILambda { PainlessMethod interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); } PainlessMethod delegateMethod = locals.getMethod(PainlessLookupUtility.buildPainlessMethodKey(call, interfaceMethod.arguments.size())); if (delegateMethod == null) { throw new IllegalArgumentException("Cannot convert function 
reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], function not found"); + "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found"); } ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 05564a2952e..2fa8ca8ca95 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -64,8 +64,8 @@ public final class EInstanceof extends AExpression { } // map to wrapped type for primitive types - resolvedType = clazz.isPrimitive() ? PainlessLookupUtility.getBoxedAnyType(clazz) : - PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); + resolvedType = clazz.isPrimitive() ? PainlessLookupUtility.typeToBoxedType(clazz) : + PainlessLookupUtility.typeToJavaType(clazz); // analyze and cast the expression expression.analyze(locals); @@ -76,7 +76,7 @@ public final class EInstanceof extends AExpression { primitiveExpression = expression.actual.isPrimitive(); // map to wrapped type for primitive types expressionType = expression.actual.isPrimitive() ? 
- PainlessLookupUtility.getBoxedAnyType(expression.actual) : PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); + PainlessLookupUtility.typeToBoxedType(expression.actual) : PainlessLookupUtility.typeToJavaType(clazz); actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 8e8d164b03d..ab1442be805 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -123,12 +123,12 @@ public final class ELambda extends AExpression implements ILambda { interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw createError(new IllegalArgumentException("Cannot pass lambda to " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.arguments.size() != paramTypeStrs.size()) throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + - "] in [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "]"); + "] in [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "]"); // for method invocation, its allowed to ignore the return value if (interfaceMethod.rtn == void.class) { returnType = def.class; @@ -140,7 +140,7 @@ public final class ELambda extends AExpression implements ILambda { for (int i = 0; i < paramTypeStrs.size(); i++) { String paramType = paramTypeStrs.get(i); if (paramType == null) { - actualParamTypeStrs.add(PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceMethod.arguments.get(i))); + 
actualParamTypeStrs.add(PainlessLookupUtility.typeToCanonicalTypeName(interfaceMethod.arguments.get(i))); } else { actualParamTypeStrs.add(paramType); } @@ -162,14 +162,14 @@ public final class ELambda extends AExpression implements ILambda { List paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size()); List paramNames = new ArrayList<>(captures.size() + paramNameStrs.size()); for (Variable var : captures) { - paramTypes.add(PainlessLookupUtility.anyTypeToPainlessTypeName(var.clazz)); + paramTypes.add(PainlessLookupUtility.typeToCanonicalTypeName(var.clazz)); paramNames.add(var.name); } paramTypes.addAll(actualParamTypeStrs); paramNames.addAll(paramNameStrs); // desugar lambda body into a synthetic method - desugared = new SFunction(reserved, location, PainlessLookupUtility.anyTypeToPainlessTypeName(returnType), name, + desugared = new SFunction(reserved, location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 6bc5331cb1d..3a47dfc725f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -53,7 +53,7 @@ public final class ENull extends AExpression { if (expected != null) { if (expected.isPrimitive()) { throw createError(new IllegalArgumentException( - "Cannot cast null to a primitive type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); + "Cannot cast null to a primitive type [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "].")); } actual = expected; diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index d34399db779..1c0fce81876 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -94,7 +94,7 @@ public final class EUnary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply not [~] to type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(child.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(child.actual) + "].")); } child.expected = promote; @@ -124,7 +124,7 @@ public final class EUnary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply positive [+] to type " + - "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); + "[" + PainlessLookupUtility.typeToJavaType(child.actual) + "].")); } child.expected = promote; @@ -158,7 +158,7 @@ public final class EUnary extends AExpression { if (promote == null) { throw createError(new ClassCastException("Cannot apply negative [-] to type " + - "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); + "[" + PainlessLookupUtility.typeToJavaType(child.actual) + "].")); } child.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index c45107a37ac..7b55cb5a804 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -68,7 +68,7 @@ public final class PBrace extends AStoreable { sub = new PSubListShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), 
index); } else { throw createError(new IllegalArgumentException("Illegal array access on type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); } sub.write = write; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index cd5d6483791..8fc8a612b84 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -73,7 +73,7 @@ public final class PCallInvoke extends AExpression { PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); if (prefix.actual.isPrimitive()) { - struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.getBoxedAnyType(prefix.actual)); + struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.typeToBoxedType(prefix.actual)); } String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index b5df74358d3..abf398d0e67 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -63,7 +63,7 @@ public final class PField extends AStoreable { prefix = prefix.cast(locals); if (prefix.actual.isArray()) { - sub = new PSubArrayLength(location, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), value); + sub = new PSubArrayLength(location, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), value); } else if (prefix.actual == def.class) { sub 
= new PSubDefField(location, value); } else { @@ -85,7 +85,8 @@ public final class PField extends AStoreable { "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { - sub = new PSubShortcut(location, value, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), getter, setter); + sub = new PSubShortcut( + location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter); } else { EConstant index = new EConstant(location, value); index.analyze(locals); @@ -103,7 +104,7 @@ public final class PField extends AStoreable { if (sub == null) { throw createError(new IllegalArgumentException( - "Unknown field [" + value + "] for type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); + "Unknown field [" + value + "] for type [" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "].")); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index a1a0ee1dade..007a599e9f8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -53,7 +53,7 @@ final class PSubField extends AStoreable { void analyze(Locals locals) { if (write && Modifier.isFinal(field.modifiers)) { throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.name + "] for type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(field.clazz) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(field.clazz) + "].")); } actual = field.clazz; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index de1a7062a24..9ff57e6b913 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -85,7 +85,7 @@ public class SEach extends AStatement { sub = new SSubEachIterable(location, variable, expression, block); } else { throw createError(new IllegalArgumentException("Illegal for each type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expression.actual) + "].")); } sub.analyze(locals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 1c801d509b5..7c243e296c7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -136,7 +136,7 @@ public final class SFunction extends AStatement { try { Class paramType = painlessLookup.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); - paramClasses[param] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(paramType); + paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType); paramTypes.add(paramType); parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } catch (IllegalArgumentException exception) { @@ -146,7 +146,7 @@ public final class SFunction extends AStatement { } org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method(name, MethodType.methodType( - PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtnType), paramClasses).toMethodDescriptorString()); + PainlessLookupUtility.typeToJavaType(rtnType), paramClasses).toMethodDescriptorString()); this.method = new PainlessMethod(name, null, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null); } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index fea8c8953b6..7e0d74865f9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -109,6 +109,6 @@ final class SSubEachArray extends AStatement { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(variable.clazz), variable.name, expression, block); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 798b30e2b6d..12e3154eb56 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -81,7 +81,7 @@ final class SSubEachIterable extends AStatement { if (method == null) { throw createError(new IllegalArgumentException("Unable to create iterator for the type " + - "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expression.actual) + "].")); } } @@ -132,6 +132,6 @@ final class SSubEachIterable extends AStatement { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(variable.clazz), variable.name, expression, block); } } diff --git 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index cc596dcc395..e26a5a38c76 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -210,7 +210,7 @@ public class PainlessDocGenerator { */ private static void emitAnchor(PrintStream stream, Class clazz) { stream.print("painless-api-reference-"); - stream.print(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz).replace('.', '-')); + stream.print(PainlessLookupUtility.typeToCanonicalTypeName(clazz).replace('.', '-')); } /** @@ -234,7 +234,7 @@ public class PainlessDocGenerator { } private static String methodName(PainlessMethod method) { - return method.name.equals("") ? PainlessLookupUtility.anyTypeToPainlessTypeName(method.target) : method.name; + return method.name.equals("") ? PainlessLookupUtility.typeToCanonicalTypeName(method.target) : method.name; } /** From c32981db6bc4b04383a801ba40c6781d22f262fe Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 20 Jul 2018 10:09:28 +1000 Subject: [PATCH 106/260] Detect old trial licenses and mimic behaviour (#32209) Prior to 6.3 a trial license default to security enabled. Since 6.3 they default to security disabled. If a cluster is upgraded from <6.3 to >6.3, then we detect this and mimic the old behaviour with respect to security. 
--- .../elasticsearch/license/LicenseService.java | 25 ++++++-- .../license/XPackLicenseState.java | 29 ++++++++- .../org/elasticsearch/license/TestUtils.java | 11 ++-- .../license/XPackLicenseStateTests.java | 64 ++++++++++++++----- .../MachineLearningLicensingTests.java | 8 +-- .../elasticsearch/license/LicensingTests.java | 4 +- .../xpack/security/SecurityTests.java | 3 +- .../authz/store/CompositeRolesStoreTests.java | 6 +- .../action/saml/SamlBaseRestHandlerTests.java | 4 +- 9 files changed, 111 insertions(+), 43 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index a39e9f412d7..d5e38fc0cb8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -120,7 +120,8 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste this.scheduler = new SchedulerEngine(clock); this.licenseState = licenseState; this.operationModeFileWatcher = new OperationModeFileWatcher(resourceWatcherService, - XPackPlugin.resolveConfigFile(env, "license_mode"), logger, () -> updateLicenseState(getLicense())); + XPackPlugin.resolveConfigFile(env, "license_mode"), logger, + () -> updateLicenseState(getLicensesMetaData())); this.scheduler.register(this); populateExpirationCallbacks(); } @@ -265,11 +266,11 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste @Override public void triggered(SchedulerEngine.Event event) { - final LicensesMetaData licensesMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE); + final LicensesMetaData licensesMetaData = getLicensesMetaData(); if (licensesMetaData != null) { final License license = licensesMetaData.getLicense(); if (event.getJobName().equals(LICENSE_JOB)) { - updateLicenseState(license); + updateLicenseState(license, 
licensesMetaData.getMostRecentTrialVersion()); } else if (event.getJobName().startsWith(ExpirationCallback.EXPIRATION_JOB_PREFIX)) { expirationCallbacks.stream() .filter(expirationCallback -> expirationCallback.getId().equals(event.getJobName())) @@ -311,6 +312,10 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste return license == LicensesMetaData.LICENSE_TOMBSTONE ? null : license; } + private LicensesMetaData getLicensesMetaData() { + return this.clusterService.state().metaData().custom(LicensesMetaData.TYPE); + } + void startTrialLicense(PostStartTrialRequest request, final ActionListener listener) { if (VALID_TRIAL_TYPES.contains(request.getType()) == false) { throw new IllegalArgumentException("Cannot start trial of type [" + request.getType() + "]. Valid trial types are " @@ -422,10 +427,16 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste } } - protected void updateLicenseState(final License license) { + private void updateLicenseState(LicensesMetaData licensesMetaData) { + if (licensesMetaData != null) { + updateLicenseState(getLicense(licensesMetaData), licensesMetaData.getMostRecentTrialVersion()); + } + } + + protected void updateLicenseState(final License license, Version mostRecentTrialVersion) { if (license == LicensesMetaData.LICENSE_TOMBSTONE) { // implies license has been explicitly deleted - licenseState.update(License.OperationMode.MISSING, false); + licenseState.update(License.OperationMode.MISSING, false, mostRecentTrialVersion); return; } if (license != null) { @@ -438,7 +449,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste // date that is near Long.MAX_VALUE active = time >= license.issueDate() && time - GRACE_PERIOD_DURATION.getMillis() < license.expiryDate(); } - licenseState.update(license.operationMode(), active); + licenseState.update(license.operationMode(), active, mostRecentTrialVersion); if (active) { if (time < 
license.expiryDate()) { @@ -480,7 +491,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste logger.info("license [{}] mode [{}] - valid", license.uid(), license.operationMode().name().toLowerCase(Locale.ROOT)); } - updateLicenseState(license); + updateLicenseState(license, currentLicensesMetaData.getMostRecentTrialVersion()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index ea30e30ae3c..722c9d0e711 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -5,8 +5,11 @@ */ package org.elasticsearch.license; +import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.xpack.core.XPackField; @@ -266,6 +269,7 @@ public class XPackLicenseState { private final List listeners = new CopyOnWriteArrayList<>(); private final boolean isSecurityEnabled; private final boolean isSecurityExplicitlyEnabled; + private volatile boolean isSecurityEnabledByTrialVersion; public XPackLicenseState(Settings settings) { this.isSecurityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); @@ -274,11 +278,30 @@ public class XPackLicenseState { // setting is not explicitly set this.isSecurityExplicitlyEnabled = isSecurityEnabled && (settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) || XPackSettings.TRANSPORT_SSL_ENABLED.get(settings)); + this.isSecurityEnabledByTrialVersion = false; } - /** Updates the current state of the license, which will change what features are available. 
*/ - void update(OperationMode mode, boolean active) { + /** + * Updates the current state of the license, which will change what features are available. + * + * @param mode The mode (type) of the current license. + * @param active True if the current license exists and is within its allowed usage period; false if it is expired or missing. + * @param mostRecentTrialVersion If this cluster has, at some point commenced a trial, the most recent version on which they did that. + * May be {@code null} if they have never generated a trial license on this cluster, or the most recent + * trial was prior to this metadata being tracked (6.1) + */ + void update(OperationMode mode, boolean active, @Nullable Version mostRecentTrialVersion) { status = new Status(mode, active); + if (isSecurityEnabled == true && isSecurityExplicitlyEnabled == false && mode == OperationMode.TRIAL + && isSecurityEnabledByTrialVersion == false) { + // Before 6.3, Trial licenses would default having security enabled. + // If this license was generated before that version, then treat it as if security is explicitly enabled + if (mostRecentTrialVersion == null || mostRecentTrialVersion.before(Version.V_6_3_0)) { + Loggers.getLogger(getClass()).info("Automatically enabling security for older trial license ({})", + mostRecentTrialVersion == null ? "[pre 6.1.0]" : mostRecentTrialVersion.toString()); + isSecurityEnabledByTrialVersion = true; + } + } listeners.forEach(Runnable::run); } @@ -587,6 +610,6 @@ public class XPackLicenseState { public boolean isSecurityEnabled() { final OperationMode mode = status.mode; - return mode == OperationMode.TRIAL ? isSecurityExplicitlyEnabled : isSecurityEnabled; + return mode == OperationMode.TRIAL ? 
(isSecurityExplicitlyEnabled || isSecurityEnabledByTrialVersion) : isSecurityEnabled; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java index d236dacaa4d..3b7906ae567 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java @@ -6,6 +6,7 @@ package org.elasticsearch.license; import com.carrotsearch.randomizedtesting.RandomizedTest; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; @@ -353,20 +354,22 @@ public class TestUtils { public static class AssertingLicenseState extends XPackLicenseState { public final List modeUpdates = new ArrayList<>(); public final List activeUpdates = new ArrayList<>(); + public final List trialVersionUpdates = new ArrayList<>(); public AssertingLicenseState() { super(Settings.EMPTY); } @Override - void update(License.OperationMode mode, boolean active) { + void update(License.OperationMode mode, boolean active, Version mostRecentTrialVersion) { modeUpdates.add(mode); activeUpdates.add(active); + trialVersionUpdates.add(mostRecentTrialVersion); } } /** - * A license state that makes the {@link #update(License.OperationMode, boolean)} + * A license state that makes the {@link #update(License.OperationMode, boolean, Version)} * method public for use in tests. 
*/ public static class UpdatableLicenseState extends XPackLicenseState { @@ -379,8 +382,8 @@ public class TestUtils { } @Override - public void update(License.OperationMode mode, boolean active) { - super.update(mode, active); + public void update(License.OperationMode mode, boolean active, Version mostRecentTrialVersion) { + super.update(mode, active, mostRecentTrialVersion); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index f1503919570..bb21ddbd1a1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -5,9 +5,11 @@ */ package org.elasticsearch.license; +import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; @@ -31,7 +33,7 @@ public class XPackLicenseStateTests extends ESTestCase { /** Creates a license state with the given license type and active state, and checks the given method returns expected. 
*/ void assertAllowed(OperationMode mode, boolean active, Predicate predicate, boolean expected) { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(mode, active); + licenseState.update(mode, active, null); assertEquals(expected, predicate.test(licenseState)); } @@ -102,7 +104,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityBasic() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(BASIC, true); + licenseState.update(BASIC, true, null); assertThat(licenseState.isAuthAllowed(), is(false)); assertThat(licenseState.isIpFilteringAllowed(), is(false)); @@ -116,7 +118,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityBasicExpired() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(BASIC, false); + licenseState.update(BASIC, false, null); assertThat(licenseState.isAuthAllowed(), is(false)); assertThat(licenseState.isIpFilteringAllowed(), is(false)); @@ -130,7 +132,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityStandard() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(STANDARD, true); + licenseState.update(STANDARD, true, null); assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(false)); @@ -144,7 +146,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityStandardExpired() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), 
true).build())); - licenseState.update(STANDARD, false); + licenseState.update(STANDARD, false, null); assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(false)); @@ -158,7 +160,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityGold() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(GOLD, true); + licenseState.update(GOLD, true, null); assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(true)); @@ -172,7 +174,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityGoldExpired() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(GOLD, false); + licenseState.update(GOLD, false, null); assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(true)); @@ -186,7 +188,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityPlatinum() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(PLATINUM, true); + licenseState.update(PLATINUM, true, null); assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(true)); @@ -200,7 +202,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSecurityPlatinumExpired() { XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY, Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build())); - licenseState.update(PLATINUM, false); + licenseState.update(PLATINUM, false, null); 
assertThat(licenseState.isAuthAllowed(), is(true)); assertThat(licenseState.isIpFilteringAllowed(), is(true)); @@ -211,6 +213,34 @@ public class XPackLicenseStateTests extends ESTestCase { assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false)); } + public void testNewTrialDefaultsSecurityOff() { + XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); + licenseState.update(TRIAL, true, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT)); + + assertThat(licenseState.isSecurityEnabled(), is(false)); + assertThat(licenseState.isAuthAllowed(), is(true)); + assertThat(licenseState.isIpFilteringAllowed(), is(true)); + assertThat(licenseState.isAuditingAllowed(), is(true)); + assertThat(licenseState.isStatsAndHealthAllowed(), is(true)); + assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true)); + assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL)); + assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true)); + } + + public void testOldTrialDefaultsSecurityOn() { + XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); + licenseState.update(TRIAL, true, rarely() ? 
null : VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_2_4)); + + assertThat(licenseState.isSecurityEnabled(), is(true)); + assertThat(licenseState.isAuthAllowed(), is(true)); + assertThat(licenseState.isIpFilteringAllowed(), is(true)); + assertThat(licenseState.isAuditingAllowed(), is(true)); + assertThat(licenseState.isStatsAndHealthAllowed(), is(true)); + assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true)); + assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL)); + assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true)); + } + public void testSecurityAckBasicToNotGoldOrStandard() { OperationMode toMode = randomFrom(OperationMode.values(), mode -> mode != GOLD && mode != STANDARD); assertAckMesssages(XPackField.SECURITY, BASIC, toMode, 0); @@ -354,7 +384,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlBasic() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(BASIC, true); + licenseState.update(BASIC, true, null); assertThat(licenseState.isSqlAllowed(), is(true)); assertThat(licenseState.isJdbcAllowed(), is(false)); @@ -362,7 +392,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlBasicExpired() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(BASIC, false); + licenseState.update(BASIC, false, null); assertThat(licenseState.isSqlAllowed(), is(false)); assertThat(licenseState.isJdbcAllowed(), is(false)); @@ -370,7 +400,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlStandard() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(STANDARD, true); + licenseState.update(STANDARD, true, null); assertThat(licenseState.isSqlAllowed(), is(true)); assertThat(licenseState.isJdbcAllowed(), is(false)); @@ -378,7 +408,7 @@ public class XPackLicenseStateTests 
extends ESTestCase { public void testSqlStandardExpired() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(STANDARD, false); + licenseState.update(STANDARD, false, null); assertThat(licenseState.isSqlAllowed(), is(false)); assertThat(licenseState.isJdbcAllowed(), is(false)); @@ -386,7 +416,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlGold() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(GOLD, true); + licenseState.update(GOLD, true, null); assertThat(licenseState.isSqlAllowed(), is(true)); assertThat(licenseState.isJdbcAllowed(), is(false)); @@ -394,7 +424,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlGoldExpired() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(GOLD, false); + licenseState.update(GOLD, false, null); assertThat(licenseState.isSqlAllowed(), is(false)); assertThat(licenseState.isJdbcAllowed(), is(false)); @@ -402,7 +432,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlPlatinum() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(PLATINUM, true); + licenseState.update(PLATINUM, true, null); assertThat(licenseState.isSqlAllowed(), is(true)); assertThat(licenseState.isJdbcAllowed(), is(true)); @@ -410,7 +440,7 @@ public class XPackLicenseStateTests extends ESTestCase { public void testSqlPlatinumExpired() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(PLATINUM, false); + licenseState.update(PLATINUM, false, null); assertThat(licenseState.isSqlAllowed(), is(false)); assertThat(licenseState.isJdbcAllowed(), is(false)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java index 
558f76a7536..57a97641791 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java @@ -99,7 +99,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { PutJobAction.Response response = putJobListener.actionGet(); assertNotNull(response); } - + // Pick a license that does not allow machine learning License.OperationMode mode = randomInvalidLicenseType(); enableLicensing(mode); @@ -151,7 +151,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { PutJobAction.Response putJobResponse = putJobListener.actionGet(); assertNotNull(putJobResponse); } - + // Pick a license that does not allow machine learning License.OperationMode mode = randomInvalidLicenseType(); enableLicensing(mode); @@ -551,7 +551,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { public static void disableLicensing(License.OperationMode operationMode) { for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(operationMode, false); + licenseState.update(operationMode, false, null); } } @@ -561,7 +561,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { public static void enableLicensing(License.OperationMode operationMode) { for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(operationMode, true); + licenseState.update(operationMode, true, null); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index f30fe5f8ec8..012050f4259 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -307,7 +307,7 @@ public class LicensingTests extends SecurityIntegTestCase { public static void disableLicensing(License.OperationMode operationMode) { for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(operationMode, false); + licenseState.update(operationMode, false, null); } } @@ -317,7 +317,7 @@ public class LicensingTests extends SecurityIntegTestCase { public static void enableLicensing(License.OperationMode operationMode) { for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(operationMode, true); + licenseState.update(operationMode, true, null); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index b1d8d4b67bf..e88b1905a7a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -399,7 +399,8 @@ public class SecurityTests extends ESTestCase { createComponents(Settings.EMPTY); Function> fieldFilter = security.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); - licenseState.update(randomFrom(License.OperationMode.BASIC, License.OperationMode.STANDARD, License.OperationMode.GOLD), true); + licenseState.update( + randomFrom(License.OperationMode.BASIC, License.OperationMode.STANDARD, License.OperationMode.GOLD), true, null); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 6))); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index b1728fd5f04..b33e93d0806 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -409,7 +409,7 @@ public class CompositeRolesStoreTests extends ESTestCase { UpdatableLicenseState xPackLicenseState = new UpdatableLicenseState(SECURITY_ENABLED_SETTINGS); // these licenses don't allow custom role providers - xPackLicenseState.update(randomFrom(OperationMode.BASIC, OperationMode.GOLD, OperationMode.STANDARD), true); + xPackLicenseState.update(randomFrom(OperationMode.BASIC, OperationMode.GOLD, OperationMode.STANDARD), true, null); CompositeRolesStore compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState); @@ -427,7 +427,7 @@ public class CompositeRolesStoreTests extends ESTestCase { Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState); // these licenses allow custom role providers - xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), true); + xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), true, null); roleNames = Sets.newHashSet("roleA"); future = new PlainActionFuture<>(); fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); @@ -441,7 +441,7 @@ public class CompositeRolesStoreTests extends ESTestCase { compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState); - 
xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), false); + xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), false, null); roleNames = Sets.newHashSet("roleA"); future = new PlainActionFuture<>(); fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/saml/SamlBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/saml/SamlBaseRestHandlerTests.java index 5942c206cac..5b442deacf6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/saml/SamlBaseRestHandlerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/saml/SamlBaseRestHandlerTests.java @@ -48,7 +48,7 @@ public class SamlBaseRestHandlerTests extends ESTestCase { .put(XPackSettings.SECURITY_ENABLED.getKey(), true) .build(); final TestUtils.UpdatableLicenseState licenseState = new TestUtils.UpdatableLicenseState(settings); - licenseState.update(licenseMode, true); + licenseState.update(licenseMode, true, null); return new SamlBaseRestHandler(settings, licenseState) { @@ -64,4 +64,4 @@ public class SamlBaseRestHandlerTests extends ESTestCase { }; } -} \ No newline at end of file +} From 6f2b7dc9fe6143d2cc2bf26d83983a3a49c40b90 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 20 Jul 2018 12:51:52 +1000 Subject: [PATCH 107/260] Fix AD / vagrant based tests for #30953 These tests were creating a SSL service that was not aware of the realm that they were trying to test. This no longer works. 
--- .../ADLdapUserSearchSessionFactoryTests.java | 2 +- .../ActiveDirectorySessionFactoryTests.java | 50 +++++++++---------- 2 files changed, 25 insertions(+), 27 deletions(-) diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java index 32452a609e2..9f97ebc6d03 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java @@ -85,7 +85,7 @@ public class ADLdapUserSearchSessionFactoryTests extends AbstractActiveDirectory Settings.Builder builder = Settings.builder() .put(globalSettings); settings.keySet().forEach(k -> { - builder.copy("xpack.security.authc.realms.ldap." + k, k, settings); + builder.copy("xpack.security.authc.realms.ad-as-ldap-test." 
+ k, k, settings); }); Settings fullSettings = builder.build(); diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 614d6659f2d..ec4e8824a19 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -59,9 +60,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT @SuppressWarnings("unchecked") public void testAdAuth() throws Exception { - RealmConfig config = new RealmConfig("ad-test", - buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false), - globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(Settings.EMPTY)); + RealmConfig config = configureRealm("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false)); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "ironman"; @@ -82,11 +81,21 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } + private RealmConfig configureRealm(String name, Settings settings) { + final Environment env = 
TestEnvironment.newEnvironment(globalSettings); + final Settings mergedSettings = Settings.builder() + .put(settings) + .normalizePrefix("xpack.security.authc.realms." + name + ".") + .put(globalSettings) + .build(); + this.sslService = new SSLService(mergedSettings, env); + return new RealmConfig(name, settings, globalSettings, env, new ThreadContext(globalSettings)); + } + @SuppressWarnings("unchecked") public void testNetbiosAuth() throws Exception { final String adUrl = randomFrom(AD_LDAP_URL, AD_LDAP_GC_URL); - RealmConfig config = new RealmConfig("ad-test", buildAdSettings(adUrl, AD_DOMAIN, false), globalSettings, - TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", buildAdSettings(adUrl, AD_DOMAIN, false)); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "ades\\ironman"; @@ -108,8 +117,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } public void testAdAuthAvengers() throws Exception { - RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false), globalSettings, - TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false)); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String[] users = new String[]{"cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow"}; @@ -126,8 +134,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT public void testAuthenticate() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, 
TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", settings); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "hulk"; @@ -151,8 +158,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT public void testAuthenticateBaseUserSearch() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Bruce Banner, CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.BASE, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", settings); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "hulk"; @@ -180,8 +186,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT "CN=Avengers,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com") .put(ActiveDirectorySessionFactorySettings.AD_GROUP_SEARCH_SCOPE_SETTING, LdapSearchScope.BASE) .build(); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", settings); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "hulk"; @@ -198,8 +203,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT public void testAuthenticateWithUserPrincipalName() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); - RealmConfig config = new RealmConfig("ad-test", settings, 
globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", settings); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { //Login with the UserPrincipalName @@ -220,8 +224,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT public void testAuthenticateWithSAMAccountName() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", settings); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { //login with sAMAccountName @@ -247,8 +250,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT .put(ActiveDirectorySessionFactorySettings.AD_USER_SEARCH_FILTER_SETTING, "(&(objectclass=user)(userPrincipalName={0}@ad.test.elasticsearch.com))") .build(); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-test", settings); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { //Login with the UserPrincipalName @@ -284,8 +286,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT .putList("ssl.certificate_authorities", certificatePaths) .build(); } - RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config 
= configureRealm("ad-as-ldap-test", settings); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool); String user = "Bruce Banner"; @@ -348,8 +349,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT .putList("ssl.certificate_authorities", certificatePaths) .build(); } - RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), - new ThreadContext(globalSettings)); + RealmConfig config = configureRealm("ad-as-ldap-test", settings); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool); String user = "Bruce Banner"; @@ -366,9 +366,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } public void testADLookup() throws Exception { - RealmConfig config = new RealmConfig("ad-test", - buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false, true), - globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(Settings.EMPTY)); + RealmConfig config = configureRealm("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false, true)); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { List users = randomSubsetOf(Arrays.asList("cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow", From a9a9598e02ac6edb27bd4940196a2e27a78d7288 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Fri, 20 Jul 2018 05:20:35 +0000 Subject: [PATCH 108/260] Require Gradle 4.9 as minimum version (#32200) Do the check in the build plugin as well to be more informative to users of build-tools. We use APIs specific to Gradle 4.9 so earlier will not work. 
--- buildSrc/build.gradle | 4 ++-- .../main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index eb95ff148f6..55520728812 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -25,8 +25,8 @@ plugins { group = 'org.elasticsearch.gradle' -if (GradleVersion.current() < GradleVersion.version('3.3')) { - throw new GradleException('Gradle 3.3+ is required to build elasticsearch') +if (GradleVersion.current() < GradleVersion.version('4.9')) { + throw new GradleException('Gradle 4.9+ is required to build elasticsearch') } if (JavaVersion.current() < JavaVersion.VERSION_1_8) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index c5dd19de3cc..9f54ae8b682 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -67,6 +67,9 @@ class BuildPlugin implements Plugin { + 'elasticearch.standalone-rest-test, and elasticsearch.build ' + 'are mutually exclusive') } + if (GradleVersion.current() < GradleVersion.version('4.9')) { + throw new GradleException('Gradle 4.9+ is required to use elasticsearch.build plugin') + } project.pluginManager.apply('java') project.pluginManager.apply('carrotsearch.randomized-testing') // these plugins add lots of info to our jars From 24068a773dcd5725ff72c1a55f3ab4bd4a0df576 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 20 Jul 2018 09:12:32 +0200 Subject: [PATCH 109/260] TESTS: Check for Netty resource leaks (#31861) * Enabled advanced leak detection when loading `EsTestCase` * Added custom `Appender` to collect leak logs and check for logged errors in a way similar to what is done for the `StatusLogger` * Fixes #20398 --- .../org/elasticsearch/test/ESTestCase.java | 33 +++++++++++++++++++ 1 file changed, 33 
insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 4c00527a932..5d555ece438 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -32,8 +32,12 @@ import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.core.layout.PatternLayout; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; @@ -183,6 +187,8 @@ public abstract class ESTestCase extends LuceneTestCase { private static final AtomicInteger portGenerator = new AtomicInteger(); + private static final Collection nettyLoggedLeaks = new ArrayList<>(); + @AfterClass public static void resetPortCounter() { portGenerator.set(0); @@ -192,8 +198,28 @@ public abstract class ESTestCase extends LuceneTestCase { System.setProperty("log4j.shutdownHookEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); + // Enable Netty leak detection and monitor logger for logged leak errors + System.setProperty("io.netty.leakDetection.level", "advanced"); + String leakLoggerName = "io.netty.util.ResourceLeakDetector"; + Logger leakLogger = LogManager.getLogger(leakLoggerName); + Appender leakAppender = new AbstractAppender(leakLoggerName, null, + PatternLayout.newBuilder().withPattern("%m").build()) { + @Override + public void append(LogEvent event) { + String message 
= event.getMessage().getFormattedMessage(); + if (Level.ERROR.equals(event.getLevel()) && message.contains("LEAK:")) { + synchronized (nettyLoggedLeaks) { + nettyLoggedLeaks.add(message); + } + } + } + }; + leakAppender.start(); + Loggers.addAppender(leakLogger, leakAppender); + // shutdown hook so that when the test JVM exits, logging is shutdown too Runtime.getRuntime().addShutdownHook(new Thread(() -> { + leakAppender.stop(); LoggerContext context = (LoggerContext) LogManager.getContext(false); Configurator.shutdown(context); })); @@ -440,6 +466,13 @@ public abstract class ESTestCase extends LuceneTestCase { statusData.clear(); } } + synchronized (nettyLoggedLeaks) { + try { + assertThat(nettyLoggedLeaks, empty()); + } finally { + nettyLoggedLeaks.clear(); + } + } } // this must be a separate method from other ensure checks above so suite scoped integ tests can call...TODO: fix that From 0f0068b91cad02fe504e2ecc1a23742275120758 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 20 Jul 2018 00:18:54 -0700 Subject: [PATCH 110/260] Ensure that field aliases cannot be used in multi-fields. (#32219) --- docs/reference/mapping/types/alias.asciidoc | 2 +- .../index/mapper/TypeParsers.java | 4 +++- .../mapper/DocumentMapperParserTests.java | 24 +++++++++++++++++++ 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/docs/reference/mapping/types/alias.asciidoc b/docs/reference/mapping/types/alias.asciidoc index d2b5ccdce8a..b4243d80e6d 100644 --- a/docs/reference/mapping/types/alias.asciidoc +++ b/docs/reference/mapping/types/alias.asciidoc @@ -74,7 +74,7 @@ field alias to query over multiple target fields in a single clause. ==== Unsupported APIs Writes to field aliases are not supported: attempting to use an alias in an index or update request -will result in a failure. Likewise, aliases cannot be used as the target of `copy_to`. +will result in a failure. Likewise, aliases cannot be used as the target of `copy_to` or in multi-fields. 
Because alias names are not present in the document source, aliases cannot be used when performing source filtering. For example, the following request will return an empty result for `_source`: diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index 52dfadfe273..a6a5fab0d04 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -230,7 +230,9 @@ public class TypeParsers { } else { throw new MapperParsingException("no type specified for property [" + multiFieldName + "]"); } - if (type.equals(ObjectMapper.CONTENT_TYPE) || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) { + if (type.equals(ObjectMapper.CONTENT_TYPE) + || type.equals(ObjectMapper.NESTED_CONTENT_TYPE) + || type.equals(FieldAliasMapper.CONTENT_TYPE)) { throw new MapperParsingException("Type [" + type + "] cannot be used in multi field"); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java index 268b03d046c..8e164c86ebe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java @@ -76,4 +76,28 @@ public class DocumentMapperParserTests extends ESSingleNodeTestCase { mapperParser.parse("type", new CompressedXContent(mapping))); assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] of different type")); } + + public void testMultiFieldsWithFieldAlias() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + 
.startObject("field") + .field("type", "text") + .startObject("fields") + .startObject("alias") + .field("type", "alias") + .field("path", "other-field") + .endObject() + .endObject() + .endObject() + .startObject("other-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject().endObject()); + MapperParsingException e = expectThrows(MapperParsingException.class, () -> + mapperParser.parse("type", new CompressedXContent(mapping))); + assertEquals("Type [alias] cannot be used in multi field", e.getMessage()); + } } From 00a6ad0e9ee874bc211a2fc55824b0c14975c7a5 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 20 Jul 2018 09:23:32 +0200 Subject: [PATCH 111/260] Remove aliases resolution limitations when security is enabled (#31952) Resolving wildcards in aliases expression is challenging as we may end up with no aliases to replace the original expression with, but if we replace with an empty array that means _all which is quite the opposite. Now that we support and serialize the original requested aliases, whenever aliases are replaced we will be able to know what was initially requested. `MetaData#findAliases` can then be updated to not return anything in case it gets empty aliases, but the original aliases were not empty. That means that empty aliases are interpreted as _all only if they were originally requested that way. 
Relates to #31516 --- .../migration/migrate_7_0/api.asciidoc | 8 +++ .../elasticsearch/action/AliasesRequest.java | 5 ++ .../indices/alias/IndicesAliasesRequest.java | 16 ++++- .../alias/TransportIndicesAliasesAction.java | 4 +- .../alias/get/TransportGetAliasesAction.java | 2 +- .../indices/get/TransportGetIndexAction.java | 5 +- .../cluster/metadata/MetaData.java | 41 +++++++++++-- .../cluster/metadata/MetaDataTests.java | 59 ++++++++++++++++++ x-pack/docs/en/security/limitations.asciidoc | 2 - .../authz/IndicesAndAliasesResolver.java | 20 ++----- .../security/authz/IndexAliasesTests.java | 60 +++++++++++-------- .../authz/IndicesAndAliasesResolverTests.java | 23 +++---- .../build.gradle | 5 +- 13 files changed, 181 insertions(+), 69 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index f7b6f9b2e00..689b941ef6b 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -79,3 +79,11 @@ the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in ==== The deprecated stored script contexts have now been removed When putting stored scripts, support for storing them with the deprecated `template` context or without a context is now removed. Scripts must be stored using the `script` context as mentioned in the documentation. + +==== Get Aliases API limitations when {security} is enabled removed + +The behavior and response codes of the get aliases API no longer vary +depending on whether {security} is enabled. Previously a +404 - NOT FOUND (IndexNotFoundException) could be returned in case the +current user was not authorized for any alias. An empty response with +status 200 - OK is now returned instead at all times. 
diff --git a/server/src/main/java/org/elasticsearch/action/AliasesRequest.java b/server/src/main/java/org/elasticsearch/action/AliasesRequest.java index bf7ceb28d50..419287f28eb 100644 --- a/server/src/main/java/org/elasticsearch/action/AliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/AliasesRequest.java @@ -32,6 +32,11 @@ public interface AliasesRequest extends IndicesRequest.Replaceable { */ String[] aliases(); + /** + * Returns the aliases as they were originally requested, before any potential name resolution + */ + String[] getOriginalAliases(); + /** * Replaces current aliases with the provided aliases. * diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 9249550871c..22e8554ed6a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -214,6 +214,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest aliases = new HashSet<>(); for (AliasActions action : actions) { String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices()); - Collections.addAll(aliases, action.aliases()); + Collections.addAll(aliases, action.getOriginalAliases()); for (String index : concreteIndices) { switch (action.actionType()) { case ADD: @@ -142,7 +142,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction> aliasMetaData = metaData.findAliases(action.aliases(), indexAsArray); + ImmutableOpenMap> aliasMetaData = metaData.findAliases(action, indexAsArray); List finalAliases = new ArrayList<>(); for (ObjectCursor> curAliases : aliasMetaData.values()) { for (AliasMetaData aliasMeta: curAliases.value) { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 1bacd652ee7..2b71e85a537 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -63,7 +63,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction listener) { String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); - ImmutableOpenMap> aliases = state.metaData().findAliases(request.aliases(), concreteIndices); + ImmutableOpenMap> aliases = state.metaData().findAliases(request, concreteIndices); listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases))); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java index 060c345454a..584ad0bc55a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java @@ -32,15 +32,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import 
org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.common.settings.IndexScopedSettings; import java.io.IOException; import java.util.List; @@ -110,7 +109,7 @@ public class TransportGetIndexAction extends TransportClusterInfoAction, Diffable, To } /** - * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and - * that point to the specified concrete indices or match partially with the indices via wildcards. + * Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards. * - * @param aliases The names of the index aliases to find * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are * present for that index */ - public ImmutableOpenMap> findAliases(final String[] aliases, String[] concreteIndices) { + public ImmutableOpenMap> findAllAliases(String[] concreteIndices) { + return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices); + } + + /** + * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and + * that point to the specified concrete indices or match partially with the indices via wildcards. + * + * @param aliasesRequest The request to find aliases for + * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. 
+ * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are + * present for that index + */ + public ImmutableOpenMap> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) { + return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices); + } + + /** + * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and + * that point to the specified concrete indices or match partially with the indices via wildcards. + * + * @param aliases The aliases to look for + * @param originalAliases The original aliases that the user originally requested + * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. + * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are + * present for that index + */ + private ImmutableOpenMap> findAliases(String[] originalAliases, String[] aliases, + String[] concreteIndices) { assert aliases != null; + assert originalAliases != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } + //if aliases were provided but they got replaced with empty aliases, return empty map + if (originalAliases.length > 0 && aliases.length == 0) { + return ImmutableOpenMap.of(); + } + boolean matchAllAliases = matchAllAliases(aliases); ImmutableOpenMap.Builder> mapBuilder = ImmutableOpenMap.builder(); for (String index : concreteIndices) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 38e3fcc6ea7..0b9055cb758 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -20,6 +20,7 @@ package 
org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -41,6 +42,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ -50,6 +52,63 @@ import static org.hamcrest.Matchers.startsWith; public class MetaDataTests extends ESTestCase { + public void testFindAliases() { + MetaData metaData = MetaData.builder().put(IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias1").build()) + .putAlias(AliasMetaData.builder("alias2").build())).build(); + + { + ImmutableOpenMap> aliases = metaData.findAliases(new GetAliasesRequest(), Strings.EMPTY_ARRAY); + assertThat(aliases.size(), equalTo(0)); + } + { + ImmutableOpenMap> aliases = metaData.findAliases(new GetAliasesRequest(), new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), equalTo(2)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); + } + { + GetAliasesRequest getAliasesRequest = new GetAliasesRequest("alias1"); + getAliasesRequest.replaceAliases(Strings.EMPTY_ARRAY); + ImmutableOpenMap> aliases = metaData.findAliases(getAliasesRequest, new String[]{"index"}); + assertThat(aliases.size(), equalTo(0)); + } + { + ImmutableOpenMap> aliases = + metaData.findAliases(new GetAliasesRequest("alias*"), new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + 
assertThat(aliasMetaDataList.size(), equalTo(2)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); + } + { + ImmutableOpenMap> aliases = + metaData.findAliases(new GetAliasesRequest("alias1"), new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), equalTo(1)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + } + { + ImmutableOpenMap> aliases = metaData.findAllAliases(new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), equalTo(2)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); + } + { + ImmutableOpenMap> aliases = metaData.findAllAliases(Strings.EMPTY_ARRAY); + assertThat(aliases.size(), equalTo(0)); + } + } + public void testIndexAndAliasWithSameName() { IndexMetaData.Builder builder = IndexMetaData.builder("index") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) diff --git a/x-pack/docs/en/security/limitations.asciidoc b/x-pack/docs/en/security/limitations.asciidoc index c127ee3d796..fb8b826d5dd 100644 --- a/x-pack/docs/en/security/limitations.asciidoc +++ b/x-pack/docs/en/security/limitations.asciidoc @@ -19,8 +19,6 @@ with {security} enabled. Elasticsearch clusters with {security} enabled apply the `/_all` wildcard, and all other wildcards, to the indices that the current user has privileges for, not the set of all indices on the cluster. -While creating or retrieving aliases by providing wildcard expressions for alias names, if there are no existing authorized aliases -that match the wildcard expression provided an IndexNotFoundException is returned. 
[float] === Multi Document APIs diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 77170f7a1cb..2247cbe02a8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; @@ -200,6 +199,8 @@ class IndicesAndAliasesResolver { if (aliasesRequest.expandAliasesWildcards()) { List aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(), loadAuthorizedAliases(authorizedIndices.get(), metaData)); + //it may be that we replace aliases with an empty array, in case there are no authorized aliases for the action. + //MetaData#findAliases will return nothing when some alias was originally requested, which was replaced with empty. 
aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()])); } if (indicesReplacedWithNoIndices) { @@ -240,8 +241,7 @@ class IndicesAndAliasesResolver { } else { // the user is not authorized to put mappings for this index, but could have been // authorized for a write using an alias that triggered a dynamic mapping update - ImmutableOpenMap> foundAliases = - metaData.findAliases(Strings.EMPTY_ARRAY, new String[] { concreteIndexName }); + ImmutableOpenMap> foundAliases = metaData.findAllAliases(new String[] { concreteIndexName }); List aliasMetaData = foundAliases.get(concreteIndexName); if (aliasMetaData != null) { Optional foundAlias = aliasMetaData.stream() @@ -279,14 +279,12 @@ class IndicesAndAliasesResolver { List finalAliases = new ArrayList<>(); //IndicesAliasesRequest doesn't support empty aliases (validation fails) but GetAliasesRequest does (in which case empty means _all) - boolean matchAllAliases = aliases.length == 0; - if (matchAllAliases) { + if (aliases.length == 0) { finalAliases.addAll(authorizedAliases); } for (String aliasPattern : aliases) { if (aliasPattern.equals(MetaData.ALL)) { - matchAllAliases = true; finalAliases.addAll(authorizedAliases); } else if (Regex.isSimpleMatchPattern(aliasPattern)) { for (String authorizedAlias : authorizedAliases) { @@ -298,16 +296,6 @@ class IndicesAndAliasesResolver { finalAliases.add(aliasPattern); } } - - //Throw exception if the wildcards expansion to authorized aliases resulted in no indices. - //We always need to replace wildcards for security reasons, to make sure that the operation is executed on the aliases that we - //authorized it to execute on. Empty set gets converted to _all by es core though, and unlike with indices, here we don't have - //a special expression to replace empty set with, which gives us the guarantee that nothing will be returned. - //This is because existing aliases can contain all kinds of special characters, they are only validated since 5.1. 
- if (finalAliases.isEmpty()) { - String indexName = matchAllAliases ? MetaData.ALL : Arrays.toString(aliases); - throw new IndexNotFoundException(indexName); - } return finalAliases; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index c6cb8bb662c..036f1667e14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.test.SecurityIntegTestCase; import org.junit.Before; @@ -235,15 +236,19 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //ok: user has manage_aliases on test_* assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*").get()); - //fails: all aliases have been deleted, no existing aliases match test_alias_* - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, + { + //fails: all aliases have been deleted, no existing aliases match test_alias_* + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*")::get); - assertThat(indexNotFoundException.toString(), containsString("[test_alias_*]")); + assertThat(exception.getMessage(), equalTo("aliases [test_alias_*] missing")); + } - //fails: all aliases have been deleted, no existing aliases match _all - 
indexNotFoundException = expectThrows(IndexNotFoundException.class, + { + //fails: all aliases have been deleted, no existing aliases match _all + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThat(exception.getMessage(), equalTo("aliases [_all] missing")); + } //fails: user doesn't have manage_aliases on alias_1 assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() @@ -383,24 +388,27 @@ public class IndexAliasesTests extends SecurityIntegTestCase { getAliasesResponse = client.admin().indices().prepareGetAliases().setAliases("test_alias").get(); assertEquals(0, getAliasesResponse.getAliases().size()); - //fails: no existing aliases to replace wildcards - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("test_*")::get); - assertThat(indexNotFoundException.toString(), containsString("[test_*]")); - - //fails: no existing aliases to replace _all - indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); - - //fails: no existing aliases to replace empty aliases - indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareGetAliases().setIndices("test_1")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); - - //fails: no existing aliases to replace empty aliases - indexNotFoundException = expectThrows(IndexNotFoundException.class, client.admin().indices().prepareGetAliases()::get); - assertThat(indexNotFoundException.toString(), 
containsString("[_all]")); + { + //fails: no existing aliases to replace wildcards + assertThrowsAuthorizationException( + client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("test_*")::get, + GetAliasesAction.NAME, "create_test_aliases_alias"); + } + { + //fails: no existing aliases to replace _all + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("_all")::get, + GetAliasesAction.NAME, "create_test_aliases_alias"); + } + { + //fails: no existing aliases to replace empty aliases + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases().setIndices("test_1")::get, + GetAliasesAction.NAME, "create_test_aliases_alias"); + } + { + //fails: no existing aliases to replace empty aliases + GetAliasesResponse response = client.admin().indices().prepareGetAliases().get(); + assertThat(response.getAliases().size(), equalTo(0)); + } } public void testCreateIndexThenAliasesCreateAndAliasesPermission3() { @@ -447,9 +455,9 @@ public class IndexAliasesTests extends SecurityIntegTestCase { assertAcked(client.admin().indices().prepareAliases().removeAlias("test_*", "_all")); //fails: all aliases have been deleted, _all can't be resolved to any existing authorized aliases - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, client.admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThat(exception.getMessage(), equalTo("aliases [_all] missing")); } public void testGetAliasesCreateAndAliasesPermission3() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 
d7c974bdc6e..bd5acdec818 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -80,6 +80,7 @@ import java.util.Set; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -781,10 +782,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActionsNoAuthorizedIndices() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo*").alias("foo*")); - //no authorized aliases match bar*, hence this action fails and makes the whole request fail + //no authorized aliases match bar*, hence aliases are replaced with empty string for that action request.addAliasAction(AliasActions.remove().index("*bar").alias("bar*")); - expectThrows(IndexNotFoundException.class, () -> resolveIndices( - request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME))); + resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); + assertThat(request.getAliasActions().get(0).aliases().length, equalTo(1)); + assertThat(request.getAliasActions().get(1).aliases().length, equalTo(0)); } public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() { @@ -1086,12 +1088,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testResolveAliasesWildcardsGetAliasesRequestNoAuthorizedIndices() { GetAliasesRequest request = new GetAliasesRequest(); - //no 
authorized aliases match bar*, hence the request fails + //no authorized aliases match bar*, hence aliases are replaced with empty array request.aliases("bar*"); request.indices("*bar"); - IndexNotFoundException e = expectThrows(IndexNotFoundException.class, - () -> resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME))); - assertEquals("no such index", e.getMessage()); + resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME)); + assertThat(request.aliases().length, equalTo(0)); } public void testResolveAliasesAllGetAliasesRequestNoAuthorizedIndices() { @@ -1100,10 +1101,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.aliases("_all"); } request.indices("non_existing"); - //current user is not authorized for any index, foo* resolves to no indices, the request fails - IndexNotFoundException e = expectThrows(IndexNotFoundException.class, - () -> resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME))); - assertEquals("no such index", e.getMessage()); + //current user is not authorized for any index, foo* resolves to no indices, aliases are replaced with empty array + ResolvedIndices resolvedIndices = resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME)); + assertThat(resolvedIndices.getLocal(), contains("non_existing")); + assertThat(request.aliases().length, equalTo(0)); } /** diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 7f2706a773a..35b08de0125 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -12,10 +12,9 @@ integTest { integTestRunner { systemProperty 'tests.rest.blacklist', - ['cat.aliases/10_basic/Empty cluster', + [ 'index/10_with_id/Index with ID', - 'indices.get_alias/10_basic/Get alias against closed indices', - 'indices.get_alias/20_empty/Check empty aliases when 
getting all aliases via /_alias', + 'indices.get_alias/10_basic/Get alias against closed indices' ].join(',') systemProperty 'tests.rest.cluster.username', System.getProperty('tests.rest.cluster.username', 'test_user') From c5cde96691df84353f1f796db40c9df0efc6825e Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Fri, 20 Jul 2018 10:18:38 +0200 Subject: [PATCH 112/260] Dependencies: Upgrade to joda time 2.10 (#32160) Changelog: http://www.joda.org/joda-time/changes-report.html --- server/build.gradle | 2 +- server/licenses/joda-time-2.10.jar.sha1 | 1 + server/licenses/joda-time-2.9.9.jar.sha1 | 1 - x-pack/plugin/sql/sql-action/build.gradle | 2 +- x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 | 1 + x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 | 1 - x-pack/plugin/sql/sql-proto/build.gradle | 2 +- x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 | 1 + x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 | 1 - 9 files changed, 6 insertions(+), 6 deletions(-) create mode 100644 server/licenses/joda-time-2.10.jar.sha1 delete mode 100644 server/licenses/joda-time-2.9.9.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 diff --git a/server/build.gradle b/server/build.gradle index c71cc4c7dbd..deb38398979 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -106,7 +106,7 @@ dependencies { compile 'com.carrotsearch:hppc:0.7.1' // time handling, remove with java 8 time - compile 'joda-time:joda-time:2.9.9' + compile 'joda-time:joda-time:2.10' // percentiles aggregation compile 'com.tdunning:t-digest:3.2' diff --git a/server/licenses/joda-time-2.10.jar.sha1 b/server/licenses/joda-time-2.10.jar.sha1 new file mode 100644 index 
00000000000..a597eabc654 --- /dev/null +++ b/server/licenses/joda-time-2.10.jar.sha1 @@ -0,0 +1 @@ +f66c8125d1057ffce6c4e29e624cac863e110e2b \ No newline at end of file diff --git a/server/licenses/joda-time-2.9.9.jar.sha1 b/server/licenses/joda-time-2.9.9.jar.sha1 deleted file mode 100644 index 4009932ea3b..00000000000 --- a/server/licenses/joda-time-2.9.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b520c458572890807d143670c9b24f4de90897 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index d8805d2e3db..f6b5177d508 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -24,7 +24,7 @@ dependencies { } compile xpackProject('plugin:sql:sql-proto') compile "org.apache.lucene:lucene-core:${versions.lucene}" - compile 'joda-time:joda-time:2.9.9' + compile 'joda-time:joda-time:2.10' runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" runtime "org.apache.logging.log4j:log4j-api:${versions.log4j}" runtime "org.apache.logging.log4j:log4j-core:${versions.log4j}" diff --git a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 new file mode 100644 index 00000000000..a597eabc654 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 @@ -0,0 +1 @@ +f66c8125d1057ffce6c4e29e624cac863e110e2b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 deleted file mode 100644 index 4009932ea3b..00000000000 --- a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b520c458572890807d143670c9b24f4de90897 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index 5a1439f4360..7f26176e3c7 100644 --- 
a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -17,7 +17,7 @@ dependencies { compile (project(':libs:x-content')) { transitive = false } - compile 'joda-time:joda-time:2.9.9' + compile 'joda-time:joda-time:2.10' runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" testCompile "org.elasticsearch.test:framework:${version}" diff --git a/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 new file mode 100644 index 00000000000..a597eabc654 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 @@ -0,0 +1 @@ +f66c8125d1057ffce6c4e29e624cac863e110e2b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 deleted file mode 100644 index 4009932ea3b..00000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b520c458572890807d143670c9b24f4de90897 \ No newline at end of file From 5cbd9ad177e4284a2fef799d01920bd09648de70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 20 Jul 2018 11:43:46 +0200 Subject: [PATCH 113/260] Rename ranking evaluation response section (#32166) Currently the ranking evaluation response contains a 'unknown_docs' section for each search use case in the evaluation set. It contains document ids for results in the search hits that currently don't have a quality rating. This change renames it to `unrated_docs`, which better reflects its purpose. 
--- .../test/java/org/elasticsearch/client/RankEvalIT.java | 6 +++--- docs/reference/search/rank-eval.asciidoc | 4 ++-- .../elasticsearch/index/rankeval/EvalQueryQuality.java | 6 +++--- .../elasticsearch/index/rankeval/EvaluationMetric.java | 5 ++--- .../index/rankeval/DiscountedCumulativeGainTests.java | 10 +++++----- .../index/rankeval/EvalQueryQualityTests.java | 6 ------ .../index/rankeval/RankEvalRequestIT.java | 6 +++--- .../index/rankeval/RankEvalResponseTests.java | 2 +- .../rest-api-spec/test/rank_eval/10_basic.yml | 8 ++++---- .../resources/rest-api-spec/test/rank_eval/20_dcg.yml | 8 ++++---- .../rest-api-spec/test/rank_eval/30_failures.yml | 2 +- .../test/rank_eval/40_rank_eval_templated.yml | 2 +- 12 files changed, 29 insertions(+), 36 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index d61fccb9371..2890257b236 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -40,7 +40,7 @@ import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments; public class RankEvalIT extends ESRestHighLevelClientTestCase { @@ -84,7 +84,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { Map partialResults = response.getPartialResults(); assertEquals(2, partialResults.size()); EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query"); - assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size()); + assertEquals(2, filterUnratedDocuments(amsterdamQueryQuality.getHitsAndRatings()).size()); List hitsAndRatings = 
amsterdamQueryQuality.getHitsAndRatings(); assertEquals(7, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { @@ -96,7 +96,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { } } EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query"); - assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size()); + assertEquals(6, filterUnratedDocuments(berlinQueryQuality.getHitsAndRatings()).size()); hitsAndRatings = berlinQueryQuality.getHitsAndRatings(); assertEquals(7, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc index 571a4886991..cf13b9f7b06 100644 --- a/docs/reference/search/rank-eval.asciidoc +++ b/docs/reference/search/rank-eval.asciidoc @@ -274,7 +274,7 @@ that shows potential errors of individual queries. The response has the followin "details": { "my_query_id1": { <2> "quality_level": 0.6, <3> - "unknown_docs": [ <4> + "unrated_docs": [ <4> { "_index": "my_index", "_id": "1960795" @@ -309,7 +309,7 @@ that shows potential errors of individual queries. The response has the followin <1> the overall evaluation quality calculated by the defined metric <2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id <3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score -<4> the `unknown_docs` section contains an `_index` and `_id` entry for each document in the search result for this +<4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for this query that didn't have a ratings value. 
This can be used to ask the user to supply ratings for these documents <5> the `hits` section shows a grouping of the search results with their supplied rating <6> the `metric_details` give additional information about the calculated quality metric (e.g. how many of the retrieved diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java index 2ad3e589bd8..91ba1ce6169 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java @@ -102,8 +102,8 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(queryId); builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult); - builder.startArray(UNKNOWN_DOCS_FIELD.getPreferredName()); - for (DocumentKey key : EvaluationMetric.filterUnknownDocuments(ratedHits)) { + builder.startArray(UNRATED_DOCS_FIELD.getPreferredName()); + for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) { builder.startObject(); builder.field(RatedDocument.INDEX_FIELD.getPreferredName(), key.getIndex()); builder.field(RatedDocument.DOC_ID_FIELD.getPreferredName(), key.getDocId()); @@ -123,7 +123,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable { } private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level"); - private static final ParseField UNKNOWN_DOCS_FIELD = new ParseField("unknown_docs"); + private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs"); private static final ParseField HITS_FIELD = new ParseField("hits"); private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details"); private static final ObjectParser 
PARSER = new ObjectParser<>("eval_query_quality", diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java index c67511e051f..37898fd9516 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java @@ -76,10 +76,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { /** * filter @link {@link RatedSearchHit} that don't have a rating */ - static List filterUnknownDocuments(List ratedHits) { - List unknownDocs = ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false) + static List filterUnratedDocuments(List ratedHits) { + return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false) .map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())).collect(Collectors.toList()); - return unknownDocs; } /** diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index 56b0c692c41..e768c297333 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -40,7 +40,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static 
org.hamcrest.CoreMatchers.containsString; @@ -128,7 +128,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, rated); assertEquals(12.779642067948913, result.getQualityLevel(), DELTA); - assertEquals(2, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size()); /** * Check with normalization: to get the maximal possible dcg, sort documents by @@ -185,7 +185,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); assertEquals(12.392789260714371, result.getQualityLevel(), DELTA); - assertEquals(1, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size()); /** * Check with normalization: to get the maximal possible dcg, sort documents by @@ -224,13 +224,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase { DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); assertEquals(0.0d, result.getQualityLevel(), DELTA); - assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size()); // also check normalized dcg = new DiscountedCumulativeGain(true, null, 10); result = dcg.evaluate("id", hits, ratedDocs); assertEquals(0.0d, result.getQualityLevel(), DELTA); - assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size()); } public void testParseFromXContent() throws IOException { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java 
b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java index e9fae6b5c63..c9251bb8090 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; -import org.elasticsearch.index.rankeval.RatedDocument.DocumentKey; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -52,11 +51,6 @@ public class EvalQueryQualityTests extends ESTestCase { } public static EvalQueryQuality randomEvalQueryQuality() { - List unknownDocs = new ArrayList<>(); - int numberOfUnknownDocs = randomInt(5); - for (int i = 0; i < numberOfUnknownDocs; i++) { - unknownDocs.add(new DocumentKey(randomAlphaOfLength(10), randomAlphaOfLength(10))); - } int numberOfSearchHits = randomInt(5); List ratedHits = new ArrayList<>(); for (int i = 0; i < numberOfSearchHits; i++) { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index b55c57bae2b..28200e7d5a0 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -40,7 +40,7 @@ import java.util.List; import java.util.Map.Entry; import java.util.Set; -import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.hamcrest.Matchers.instanceOf; @@ -120,7 +120,7 @@ public class RankEvalRequestIT extends ESIntegTestCase { for (Entry entry : entrySet) { EvalQueryQuality quality = entry.getValue(); if (entry.getKey() == "amsterdam_query") { - assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size()); + assertEquals(2, filterUnratedDocuments(quality.getHitsAndRatings()).size()); List hitsAndRatings = quality.getHitsAndRatings(); assertEquals(6, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { @@ -133,7 +133,7 @@ public class RankEvalRequestIT extends ESIntegTestCase { } } if (entry.getKey() == "berlin_query") { - assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size()); + assertEquals(5, filterUnratedDocuments(quality.getHitsAndRatings()).size()); List hitsAndRatings = quality.getHitsAndRatings(); assertEquals(6, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index e4fe4848237..1e94e869d25 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -158,7 +158,7 @@ public class RankEvalResponseTests extends ESTestCase { " \"details\": {" + " \"coffee_query\": {" + " \"quality_level\": 0.1," + - " \"unknown_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + + " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," + " \"rating\":5}," + " {\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"456\",\"_score\":1.0}," + diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml 
b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml index 3900b1f32ba..62c246fb320 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml @@ -73,7 +73,7 @@ setup: - match: { quality_level: 1} - match: { details.amsterdam_query.quality_level: 1.0} - - match: { details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]} + - match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]} - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}} - length: { details.amsterdam_query.hits: 3} @@ -85,7 +85,7 @@ setup: - is_false: details.amsterdam_query.hits.2.rating - match: { details.berlin_query.quality_level: 1.0} - - match: { details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]} + - match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]} - match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}} - length: { details.berlin_query.hits: 2} - match: { details.berlin_query.hits.0.hit._id: "doc1" } @@ -155,9 +155,9 @@ setup: - gt: {details.amsterdam_query.quality_level: 0.333} - lt: {details.amsterdam_query.quality_level: 0.334} - match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}} - - match: {details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc2"}, + - match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"}, {"_index": "foo", "_id": "doc3"} ]} - match: {details.berlin_query.quality_level: 0.5} - match: {details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}} - - match: {details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc1"}]} + - match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]} diff --git 
a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml index fc5e6576ad4..baf10f1542c 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml @@ -73,7 +73,7 @@ - lt: {quality_level: 13.848264 } - gt: {details.dcg_query.quality_level: 13.848263} - lt: {details.dcg_query.quality_level: 13.848264} - - match: {details.dcg_query.unknown_docs: [ ]} + - match: {details.dcg_query.unrated_docs: [ ]} # reverse the order in which the results are returned (less relevant docs first) @@ -100,7 +100,7 @@ - lt: {quality_level: 10.299675} - gt: {details.dcg_query_reverse.quality_level: 10.299674} - lt: {details.dcg_query_reverse.quality_level: 10.299675} - - match: {details.dcg_query_reverse.unknown_docs: [ ]} + - match: {details.dcg_query_reverse.unrated_docs: [ ]} # if we mix both, we should get the average @@ -138,7 +138,7 @@ - lt: {quality_level: 12.073970} - gt: {details.dcg_query.quality_level: 13.848263} - lt: {details.dcg_query.quality_level: 13.848264} - - match: {details.dcg_query.unknown_docs: [ ]} + - match: {details.dcg_query.unrated_docs: [ ]} - gt: {details.dcg_query_reverse.quality_level: 10.299674} - lt: {details.dcg_query_reverse.quality_level: 10.299675} - - match: {details.dcg_query_reverse.unknown_docs: [ ]} + - match: {details.dcg_query_reverse.unrated_docs: [ ]} diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml index 4008f677185..d6119ad3a9e 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml @@ -36,7 +36,7 @@ - match: { quality_level: 1} - match: { details.amsterdam_query.quality_level: 
1.0} - - match: { details.amsterdam_query.unknown_docs: [ ]} + - match: { details.amsterdam_query.unrated_docs: [ ]} - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}} - is_true: failures.invalid_query diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml index f0c564d3639..5e0082d213c 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml @@ -85,7 +85,7 @@ setup: } - match: {quality_level: 0.9} - - match: {details.amsterdam_query.unknown_docs.0._id: "6"} + - match: {details.amsterdam_query.unrated_docs.0._id: "6"} --- "Test illegal request parts": From 6afb661ca5393853df38d9a104568e1ea8b34d0a Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 20 Jul 2018 12:55:20 +0200 Subject: [PATCH 114/260] Remove indices stats timeout from monitoring docs With this commit we remove the documentation for the setting `xpack.monitoring.collection.indices.stats.timeout` which has already been removed in code. Closes #32133 Relates #32229 --- docs/reference/settings/monitoring-settings.asciidoc | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc index 5c812b6f53c..2759944e615 100644 --- a/docs/reference/settings/monitoring-settings.asciidoc +++ b/docs/reference/settings/monitoring-settings.asciidoc @@ -85,10 +85,6 @@ You can update this setting through the Sets the timeout for collecting index statistics. Defaults to `10s`. -`xpack.monitoring.collection.indices.stats.timeout`:: - -Sets the timeout for collecting total indices statistics. Defaults to `10s`. 
- `xpack.monitoring.collection.index.recovery.active_only`:: Controls whether or not all recoveries are collected. Set to `true` to From ac960bfa6b6bd96e83cd186bf95667f48be1e0e6 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 20 Jul 2018 15:49:37 +0100 Subject: [PATCH 115/260] [ML] Use default request durability for .ml-state index (#32233) The initial decision to use async durability was made a long time ago for performance reasons. That argument no longer applies and we prefer the safety of request durability. --- .../java/org/elasticsearch/xpack/ml/MachineLearning.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 3d1011c47e2..d5461d85599 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -635,11 +635,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu // TODO review these settings .settings(Settings.builder() .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting) - // Sacrifice durability for performance: in the event of power - // failure we can lose the last 5 seconds of changes, but it's - // much faster - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async")) + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)) .putMapping(ElasticsearchMappings.DOC_TYPE, Strings.toString(stateMapping)) .version(Version.CURRENT.id) .build(); From 74aa7b0815c048271a5536aad7941ca72a0d8f6c Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Fri, 20 Jul 2018 08:52:45 -0600 Subject: [PATCH 116/260] Enhance Parent circuit breaker error message (#32056) * Enhance Parent 
circuit breaker error message This adds information about either the current real usage (if tracking "real" memory usage) or the child breaker usages to the exception message when the parent circuit breaker trips. The messages now look like: ``` [parent] Data too large, data for [my_request] would be [211288064/201.5mb], which is larger than the limit of [209715200/200mb], usages [request=157286400/150mb, fielddata=54001664/51.5mb, in_flight_requests=0/0b, accounting=0/0b] ``` Or when tracking real memory usage: ``` [parent] Data too large, data for [request] would be [251/251b], which is larger than the limit of [200/200b], real usage: [181/181b], new bytes reserved: [70/70b] ``` * Only call currentMemoryUsage once by returning structured object --- .../HierarchyCircuitBreakerService.java | 54 +++++++++++++++---- .../HierarchyCircuitBreakerServiceTests.java | 5 ++ 2 files changed, 49 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index ebebcfd253c..7e6a9c29a83 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; /** * CircuitBreakerService that attempts to redistribute space between breakers @@ -215,7 +216,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { } // Manually add the parent breaker settings since they aren't part of the breaker map allStats.add(new CircuitBreakerStats(CircuitBreaker.PARENT, parentSettings.getLimit(), - parentUsed(0L), 1.0, parentTripCount.get())); + 
parentUsed(0L).totalUsage, 1.0, parentTripCount.get())); return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()])); } @@ -225,15 +226,26 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { return new CircuitBreakerStats(breaker.getName(), breaker.getLimit(), breaker.getUsed(), breaker.getOverhead(), breaker.getTrippedCount()); } - private long parentUsed(long newBytesReserved) { + private static class ParentMemoryUsage { + final long baseUsage; + final long totalUsage; + + ParentMemoryUsage(final long baseUsage, final long totalUsage) { + this.baseUsage = baseUsage; + this.totalUsage = totalUsage; + } + } + + private ParentMemoryUsage parentUsed(long newBytesReserved) { if (this.trackRealMemoryUsage) { - return currentMemoryUsage() + newBytesReserved; + final long current = currentMemoryUsage(); + return new ParentMemoryUsage(current, current + newBytesReserved); } else { long parentEstimated = 0; for (CircuitBreaker breaker : this.breakers.values()) { parentEstimated += breaker.getUsed() * breaker.getOverhead(); } - return parentEstimated; + return new ParentMemoryUsage(parentEstimated, parentEstimated); } } @@ -246,15 +258,37 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { * Checks whether the parent breaker has been tripped */ public void checkParentLimit(long newBytesReserved, String label) throws CircuitBreakingException { - long totalUsed = parentUsed(newBytesReserved); + final ParentMemoryUsage parentUsed = parentUsed(newBytesReserved); long parentLimit = this.parentSettings.getLimit(); - if (totalUsed > parentLimit) { + if (parentUsed.totalUsage > parentLimit) { this.parentTripCount.incrementAndGet(); - final String message = "[parent] Data too large, data for [" + label + "]" + - " would be [" + totalUsed + "/" + new ByteSizeValue(totalUsed) + "]" + + final StringBuilder message = new StringBuilder("[parent] Data too large, data for [" + label + "]" + + " would be 
[" + parentUsed.totalUsage + "/" + new ByteSizeValue(parentUsed.totalUsage) + "]" + ", which is larger than the limit of [" + - parentLimit + "/" + new ByteSizeValue(parentLimit) + "]"; - throw new CircuitBreakingException(message, totalUsed, parentLimit); + parentLimit + "/" + new ByteSizeValue(parentLimit) + "]"); + if (this.trackRealMemoryUsage) { + final long realUsage = parentUsed.baseUsage; + message.append(", real usage: ["); + message.append(realUsage); + message.append("/"); + message.append(new ByteSizeValue(realUsage)); + message.append("], new bytes reserved: ["); + message.append(newBytesReserved); + message.append("/"); + message.append(new ByteSizeValue(newBytesReserved)); + message.append("]"); + } else { + message.append(", usages ["); + message.append(String.join(", ", + this.breakers.entrySet().stream().map(e -> { + final CircuitBreaker breaker = e.getValue(); + final long breakerUsed = (long)(breaker.getUsed() * breaker.getOverhead()); + return e.getKey() + "=" + breakerUsed + "/" + new ByteSizeValue(breakerUsed); + }) + .collect(Collectors.toList()))); + message.append("]"); + } + throw new CircuitBreakingException(message.toString(), parentUsed.totalUsage, parentLimit); } } diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index a73cf8630fe..6f8689a9664 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -199,6 +199,8 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase { .addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should break")); assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [should break] would be")); 
assertThat(exception.getMessage(), containsString("which is larger than the limit of [209715200/200mb]")); + assertThat(exception.getMessage(), + containsString("usages [request=157286400/150mb, fielddata=54001664/51.5mb, in_flight_requests=0/0b, accounting=0/0b]")); } } @@ -239,6 +241,9 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase { // it was the parent that rejected the reservation assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [request] would be")); assertThat(exception.getMessage(), containsString("which is larger than the limit of [200/200b]")); + assertThat(exception.getMessage(), + containsString("real usage: [181/181b], new bytes reserved: [" + (reservationInBytes * 2) + + "/" + new ByteSizeValue(reservationInBytes * 2) + "]")); assertEquals(0, requestBreaker.getTrippedCount()); assertEquals(1, service.stats().getStats(CircuitBreaker.PARENT).getTrippedCount()); From 6ed1ad0b6ffd5eba5be0807d7ea96aefe722840f Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 20 Jul 2018 16:55:11 +0200 Subject: [PATCH 117/260] Fix multi level nested sort (#32204) The parent filter for nested sort should always match **all** parents regardless of the child queries. It is used to find the boundaries of a single parent and we use the child query to match all the filters set in the nested tree so there is no need to repeat the nested filters. With this change we ensure that we build bitset filters only to find the root docs (or the docs at the level where the sort applies) that can be reused among queries. 
Closes #31554 Closes #32130 Closes #31783 Co-authored-by: Dominic Bevacqua --- .../search/sort/SortBuilder.java | 39 +- .../search/nested/NestedSortingTests.java | 453 ++++++++++++++++++ .../search/nested/SimpleNestedIT.java | 73 +++ 3 files changed, 544 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 9537e288919..9d2a5c9f1e2 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -22,9 +22,7 @@ package org.elasticsearch.search.sort; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToChildBlockJoinQuery; -import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -186,10 +184,21 @@ public abstract class SortBuilder> implements NamedWrit } protected static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort) throws IOException { - return resolveNested(context, nestedSort, null); + final Query childQuery = resolveNestedQuery(context, nestedSort, null); + if (childQuery == null) { + return null; + } + final ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); + final Query parentQuery; + if (objectMapper == null) { + parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); + } else { + parentQuery = objectMapper.nestedTypeFilter(); + } + return new Nested(context.bitsetFilter(parentQuery), childQuery); } - private static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort, Nested nested) throws IOException { + private static Query 
resolveNestedQuery(QueryShardContext context, NestedSortBuilder nestedSort, Query parentQuery) throws IOException { if (nestedSort == null || nestedSort.getPath() == null) { return null; } @@ -207,15 +216,7 @@ public abstract class SortBuilder> implements NamedWrit if (!nestedObjectMapper.nested().isNested()) { throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type"); } - - // get our parent query which will determines our parent documents - Query parentQuery; ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); - if (objectMapper == null) { - parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); - } else { - parentQuery = objectMapper.nestedTypeFilter(); - } // get our child query, potentially applying a users filter Query childQuery; @@ -223,7 +224,7 @@ public abstract class SortBuilder> implements NamedWrit context.nestedScope().nextLevel(nestedObjectMapper); if (nestedFilter != null) { assert nestedFilter == Rewriteable.rewrite(nestedFilter, context) : "nested filter is not rewritten"; - if (nested == null) { + if (parentQuery == null) { // this is for back-compat, original single level nested sorting never applied a nested type filter childQuery = nestedFilter.toFilter(context); } else { @@ -237,27 +238,23 @@ public abstract class SortBuilder> implements NamedWrit } // apply filters from the previous nested level - if (nested != null) { - parentQuery = Queries.filtered(parentQuery, - new ToParentBlockJoinQuery(nested.getInnerQuery(), nested.getRootFilter(), ScoreMode.None)); - + if (parentQuery != null) { if (objectMapper != null) { childQuery = Queries.filtered(childQuery, - new ToChildBlockJoinQuery(nested.getInnerQuery(), context.bitsetFilter(objectMapper.nestedTypeFilter()))); + new ToChildBlockJoinQuery(parentQuery, context.bitsetFilter(objectMapper.nestedTypeFilter()))); } } // wrap up our parent and child and either process the next level of nesting or 
return - final Nested innerNested = new Nested(context.bitsetFilter(parentQuery), childQuery); if (nestedNestedSort != null) { try { context.nestedScope().nextLevel(nestedObjectMapper); - return resolveNested(context, nestedNestedSort, innerNested); + return resolveNestedQuery(context, nestedNestedSort, childQuery); } finally { context.nestedScope().previousLevel(); } } else { - return innerNested; + return childQuery; } } diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index 57f2310e877..1300debd5eb 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -21,7 +21,11 @@ package org.elasticsearch.index.search.nested; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; +import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; @@ -40,21 +44,37 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.Version; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; import 
org.elasticsearch.index.fielddata.AbstractFieldDataTestCase; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.NoOrdinalsStringFieldDataTests; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.NestedQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.MultiValueMode; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.NestedSortBuilder; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME; import static org.hamcrest.Matchers.equalTo; public class NestedSortingTests extends AbstractFieldDataTestCase { @@ -343,4 +363,437 @@ public class NestedSortingTests extends AbstractFieldDataTestCase { searcher.getIndexReader().close(); } + public void testMultiLevelNestedSorting() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder(); + mapping.startObject(); + { + mapping.startObject("_doc"); + { + mapping.startObject("properties"); + { + { + mapping.startObject("title"); + mapping.field("type", "text"); + mapping.endObject(); + } + { + mapping.startObject("genre"); + mapping.field("type", "keyword"); + mapping.endObject(); + } + { + mapping.startObject("chapters"); + mapping.field("type", "nested"); + { + mapping.startObject("properties"); + { + mapping.startObject("title"); + 
mapping.field("type", "text"); + mapping.endObject(); + } + { + mapping.startObject("read_time_seconds"); + mapping.field("type", "integer"); + mapping.endObject(); + } + { + mapping.startObject("paragraphs"); + mapping.field("type", "nested"); + { + mapping.startObject("properties"); + { + { + mapping.startObject("header"); + mapping.field("type", "text"); + mapping.endObject(); + } + { + mapping.startObject("content"); + mapping.field("type", "text"); + mapping.endObject(); + } + { + mapping.startObject("word_count"); + mapping.field("type", "integer"); + mapping.endObject(); + } + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + IndexService indexService = createIndex("nested_sorting", Settings.EMPTY, "_doc", mapping); + + List> books = new ArrayList<>(); + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 743)); + document.add(new IntPoint("chapters.paragraphs.word_count", 743)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 3", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 400)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 400)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new 
TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 234)); + document.add(new IntPoint("chapters.paragraphs.word_count", 234)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 2", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 200)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 200)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 2", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 478)); + document.add(new IntPoint("chapters.paragraphs.word_count", 478)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 849)); + document.add(new IntPoint("chapters.paragraphs.word_count", 849)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 1400)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 1400)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "science fiction", Field.Store.NO)); + 
document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "1", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Introduction", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 76)); + document.add(new IntPoint("chapters.paragraphs.word_count", 76)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 20)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 20)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "romance", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "2", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "A bad dream", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 976)); + document.add(new IntPoint("chapters.paragraphs.word_count", 976)); + book.add(document); + document = new Document(); + 
document.add(new TextField("chapters.title", "The beginning of the end", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 1200)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 1200)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "horror", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "3", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "macaroni", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 180)); + document.add(new IntPoint("chapters.paragraphs.word_count", 180)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "hamburger", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 150)); + document.add(new IntPoint("chapters.paragraphs.word_count", 150)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "tosti", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new 
SortedNumericDocValuesField("chapters.paragraphs.word_count", 120)); + document.add(new IntPoint("chapters.paragraphs.word_count", 120)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "easy meals", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 800)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 800)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "introduction", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 87)); + document.add(new IntPoint("chapters.paragraphs.word_count", 87)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "introduction", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 10)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 10)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "cooking", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "4", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new StringField("genre", "unknown", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "5", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + 
book.add(document); + books.add(book); + } + + Collections.shuffle(books, random()); + for (List book : books) { + writer.addDocuments(book); + if (randomBoolean()) { + writer.commit(); + } + } + DirectoryReader reader = DirectoryReader.open(writer); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0)); + IndexSearcher searcher = new IndexSearcher(reader); + QueryShardContext queryShardContext = indexService.newQueryShardContext(0, reader, () -> 0L, null); + + FieldSortBuilder sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs"))); + QueryBuilder queryBuilder = new MatchAllQueryBuilder(); + TopFieldDocs topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(5L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L)); + assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L)); + assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE)); + + // Specific genre + { + queryBuilder = new TermQueryBuilder("genre", "romance"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + 
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + + queryBuilder = new TermQueryBuilder("genre", "science fiction"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); + + queryBuilder = new TermQueryBuilder("genre", "horror"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); + + queryBuilder = new TermQueryBuilder("genre", "cooking"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + } + + // reverse sort order + { + sortBuilder.order(SortOrder.DESC); + queryBuilder = new MatchAllQueryBuilder(); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(5L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L)); + assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L)); + assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L)); + 
assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE)); + } + + // Specific genre and reverse sort order + { + queryBuilder = new TermQueryBuilder("genre", "romance"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + + queryBuilder = new TermQueryBuilder("genre", "science fiction"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); + + queryBuilder = new TermQueryBuilder("genre", "horror"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); + + queryBuilder = new TermQueryBuilder("genre", "cooking"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); + } + + // Nested filter + query + { + queryBuilder = new RangeQueryBuilder("chapters.read_time_seconds").to(50L); + sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort( + new NestedSortBuilder("chapters") + .setFilter(queryBuilder) + .setNestedSort(new NestedSortBuilder("chapters.paragraphs")) + ); + topFields = search(new 
NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); + + sortBuilder.order(SortOrder.DESC); + topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L)); + } + + // Multiple Nested filters + query + { + queryBuilder = new RangeQueryBuilder("chapters.read_time_seconds").to(50L); + sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort( + new NestedSortBuilder("chapters") + .setFilter(queryBuilder) + .setNestedSort( + new NestedSortBuilder("chapters.paragraphs") + .setFilter(new RangeQueryBuilder("chapters.paragraphs.word_count").from(80L)) + ) + ); + topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE)); + + sortBuilder.order(SortOrder.DESC); + topFields = 
search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE)); + } + + // Nested filter + Specific genre + { + sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort( + new NestedSortBuilder("chapters") + .setFilter(new RangeQueryBuilder("chapters.read_time_seconds").to(50L)) + .setNestedSort(new NestedSortBuilder("chapters.paragraphs")) + ); + + queryBuilder = new TermQueryBuilder("genre", "romance"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + + queryBuilder = new TermQueryBuilder("genre", "science fiction"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); + + queryBuilder = new TermQueryBuilder("genre", "horror"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); + + queryBuilder = new TermQueryBuilder("genre", "cooking"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, 
searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + } + } + + private static TopFieldDocs search(QueryBuilder queryBuilder, FieldSortBuilder sortBuilder, QueryShardContext queryShardContext, + IndexSearcher searcher) throws IOException { + Query query = new BooleanQuery.Builder() + .add(queryBuilder.toQuery(queryShardContext), Occur.MUST) + .add(Queries.newNonNestedFilter(Version.CURRENT), Occur.FILTER) + .build(); + Sort sort = new Sort(sortBuilder.build(queryShardContext).field); + return searcher.search(query, 10, sort); + } + } diff --git a/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 68ef78f4273..6d8bcfb6131 100644 --- a/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -709,6 +709,79 @@ public class SimpleNestedIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); // missing last } + // https://github.com/elastic/elasticsearch/issues/31554 + public void testLeakingSortValues() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(Settings.builder().put("number_of_shards", 1)) + .addMapping("test-type", "{\n" + + " \"dynamic\": \"strict\",\n" + + " \"properties\": {\n" + + " \"nested1\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"nested2\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"nested2_keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"sortVal\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n", XContentType.JSON)); + ensureGreen(); + + client().prepareIndex("test", "test-type", 
"1").setSource("{\n" + + " \"nested1\": [\n" + + " {\n" + + " \"nested2\": [\n" + + " {\n" + + " \"nested2_keyword\": \"nested2_bar\",\n" + + " \"sortVal\": 1\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}", XContentType.JSON).execute().actionGet(); + + client().prepareIndex("test", "test-type", "2").setSource("{\n" + + " \"nested1\": [\n" + + " {\n" + + " \"nested2\": [\n" + + " {\n" + + " \"nested2_keyword\": \"nested2_bar\",\n" + + " \"sortVal\": 2\n" + + " }\n" + + " ]\n" + + " } \n" + + " ]\n" + + "}", XContentType.JSON).execute().actionGet(); + + refresh(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(termQuery("_id", 2)) + .addSort( + SortBuilders + .fieldSort("nested1.nested2.sortVal") + .setNestedSort(new NestedSortBuilder("nested1") + .setNestedSort(new NestedSortBuilder("nested1.nested2") + .setFilter(termQuery("nested1.nested2.nested2_keyword", "nested2_bar")))) + ) + .execute().actionGet(); + + assertHitCount(searchResponse, 1); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2")); + + } + public void testSortNestedWithNestedFilter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", XContentFactory.jsonBuilder() From 91a0daf0e49f21e5dc909ad395b3eb8fad4e9075 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 20 Jul 2018 17:14:17 +0200 Subject: [PATCH 118/260] MINOR: Remove unused `IndexDynamicSettings` (#32237) --- .../index/settings/IndexDynamicSettings.java | 39 ------------------- 1 file changed, 39 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java diff --git a/server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java b/server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java deleted file mode 100644 
index 0f686efe926..00000000000 --- a/server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.settings; - -import org.elasticsearch.common.inject.BindingAnnotation; - -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.PARAMETER; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - - -@BindingAnnotation -@Target({FIELD, PARAMETER}) -@Retention(RUNTIME) -@Documented -public @interface IndexDynamicSettings { - -} From 320f1d263f51f7b6c36158b6774a05844dcab86e Mon Sep 17 00:00:00 2001 From: Paul Sanwald Date: Fri, 20 Jul 2018 11:20:51 -0400 Subject: [PATCH 119/260] muting failing test for internal auto date histogram to avoid failure before fix is merged --- .../bucket/histogram/InternalAutoDateHistogramTests.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 389371efd79..96811ce424c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -117,6 +117,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati return ParsedAutoDateHistogram.class; } + @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215") + public void testReduceRandom() { + super.testReduceRandom(); + } + @Override protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { String name = instance.getName(); From c6c9075ca49af7a1aebdcbb01aa36c5adcc4a55f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 20 Jul 2018 12:01:50 -0400 Subject: [PATCH 120/260] Switch rolling restart to new style Requests (#32147) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `qa/rolling-upgrade` project to use the new versions. 
--- .../elasticsearch/upgrades/RecoveryIT.java | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 809cd40d698..de5681ebe1a 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.upgrades; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Request; @@ -32,14 +30,12 @@ import org.elasticsearch.test.rest.yaml.ObjectPath; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.Future; import java.util.function.Predicate; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; -import static java.util.Collections.emptyMap; import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY; @@ -65,8 +61,9 @@ public class RecoveryIT extends AbstractRollingTestCase { createIndex(index, settings.build()); } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { ensureGreen(index); - Response response = client().performRequest("GET", index + "/_stats", Collections.singletonMap("level", "shards")); - assertOK(response); + Request shardStatsRequest = new Request("GET", index + "/_stats"); + shardStatsRequest.addParameter("level", "shards"); + Response 
response = client().performRequest(shardStatsRequest); ObjectPath objectPath = ObjectPath.createFromResponse(response); List shardStats = objectPath.evaluate("indices." + index + ".shards.0"); assertThat(shardStats, hasSize(2)); @@ -87,8 +84,9 @@ public class RecoveryIT extends AbstractRollingTestCase { private int indexDocs(String index, final int idStart, final int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { final int id = idStart + i; - assertOK(client().performRequest("PUT", index + "/test/" + id, emptyMap(), - new StringEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}", ContentType.APPLICATION_JSON))); + Request indexDoc = new Request("PUT", index + "/test/" + id); + indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}"); + client().performRequest(indexDoc); } return numDocs; } @@ -113,7 +111,7 @@ public class RecoveryIT extends AbstractRollingTestCase { public void testRecoveryWithConcurrentIndexing() throws Exception { final String index = "recovery_with_concurrent_indexing"; - Response response = client().performRequest("GET", "_nodes"); + Response response = client().performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); final Map nodeMap = objectPath.evaluate("nodes"); List nodes = new ArrayList<>(nodeMap.keySet()); @@ -139,7 +137,7 @@ public class RecoveryIT extends AbstractRollingTestCase { updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null)); asyncIndexDocs(index, 10, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + "/_refresh")); + client().performRequest(new Request("POST", index + "/_refresh")); assertCount(index, "_only_nodes:" + nodes.get(0), 60); assertCount(index, "_only_nodes:" + nodes.get(1), 60); assertCount(index, "_only_nodes:" + nodes.get(2), 60); @@ -150,7 +148,7 @@ public class RecoveryIT extends AbstractRollingTestCase { 
updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null)); asyncIndexDocs(index, 60, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + "/_refresh")); + client().performRequest(new Request("POST", index + "/_refresh")); assertCount(index, "_only_nodes:" + nodes.get(0), 110); assertCount(index, "_only_nodes:" + nodes.get(1), 110); assertCount(index, "_only_nodes:" + nodes.get(2), 110); @@ -161,15 +159,16 @@ public class RecoveryIT extends AbstractRollingTestCase { } private void assertCount(final String index, final String preference, final int expectedCount) throws IOException { - final Response response = client().performRequest("GET", index + "/_count", Collections.singletonMap("preference", preference)); - assertOK(response); + final Request request = new Request("GET", index + "/_count"); + request.addParameter("preference", preference); + final Response response = client().performRequest(request); final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString()); assertThat(actualCount, equalTo(expectedCount)); } private String getNodeId(Predicate versionPredicate) throws IOException { - Response response = client().performRequest("GET", "_nodes"); + Response response = client().performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); Map nodesAsMap = objectPath.evaluate("nodes"); for (String id : nodesAsMap.keySet()) { @@ -216,7 +215,7 @@ public class RecoveryIT extends AbstractRollingTestCase { updateIndexSettings(index, Settings.builder().put("index.routing.allocation.include._id", newNode)); asyncIndexDocs(index, 10, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + "/_refresh")); + client().performRequest(new Request("POST", index + "/_refresh")); assertCount(index, "_only_nodes:" + newNode, 60); break; case UPGRADED: @@ -226,8 +225,8 @@ 
public class RecoveryIT extends AbstractRollingTestCase { ); asyncIndexDocs(index, 60, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + "/_refresh")); - Response response = client().performRequest("GET", "_nodes"); + client().performRequest(new Request("POST", index + "/_refresh")); + Response response = client().performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); final Map nodeMap = objectPath.evaluate("nodes"); List nodes = new ArrayList<>(nodeMap.keySet()); From af0c1d30fe8570d33cd413b6a81e737ecd754524 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 20 Jul 2018 10:06:07 -0700 Subject: [PATCH 121/260] Make sure that field aliases count towards the total fields limit. (#32222) --- .../index/mapper/MapperService.java | 2 +- .../index/mapper/MapperServiceTests.java | 31 +++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 936e7334002..921e472c94f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -427,7 +427,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { // the master node restoring mappings from disk or data nodes // deserializing cluster state that was sent by the master node, // this check will be skipped. 
- checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size()); + checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() + fieldAliasMappers.size()); } results.put(newMapper.type(), newMapper); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 20e0dd4639c..51b6e9d7168 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -270,6 +270,37 @@ public class MapperServiceTests extends ESSingleNodeTestCase { assertThat(e.getMessage(), containsString("Invalid [path] value [nested.field] for field alias [alias]")); } + public void testTotalFieldsLimitWithFieldAlias() throws Throwable { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("alias") + .field("type", "alias") + .field("path", "field") + .endObject() + .startObject("field") + .field("type", "text") + .endObject() + .endObject() + .endObject().endObject()); + + DocumentMapper documentMapper = createIndex("test1").mapperService() + .merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + // Set the total fields limit to the number of non-alias fields, to verify that adding + // a field alias pushes the mapping over the limit. 
+ int numFields = documentMapper.mapping().metadataMappers.length + 2; + int numNonAliasFields = numFields - 1; + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + Settings settings = Settings.builder() + .put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), numNonAliasFields) + .build(); + createIndex("test2", settings).mapperService() + .merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + }); + assertEquals("Limit of total fields [" + numNonAliasFields + "] in index [test2] has been exceeded", e.getMessage()); + } + public void testForbidMultipleTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); From 54d896c4ede28500a01e308be7c0f504a1835b63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 20 Jul 2018 19:08:59 +0200 Subject: [PATCH 122/260] [Tests] Remove QueryStringQueryBuilderTests#toQuery class assertions (#32236) Currently we check that the queries that QueryStringQueryBuilder#toQuery returns is one out of a list of many Lucene query classes. This list has extended a lot over time, since QueryStringQueryBuilder can build all sort of queries. This makes the test hard to maintain. The recent addition of alias fields which build a BlendedTermQuery show how easy this test breaks. Also the current assertions doesn't add a lot in terms of catching errors. This is why we decided to remove this check. 
Closes #32234 --- .../index/query/QueryStringQueryBuilderTests.java | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 4b9e0f5a66e..591ee5af080 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -42,7 +42,6 @@ import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; @@ -51,7 +50,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -74,7 +72,6 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQuery import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDisjunctionSubQuery; -import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.instanceOf; @@ -381,11 +378,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase Date: Fri, 20 Jul 2018 11:23:29 -0700 Subject: [PATCH 123/260] Add region ISO code to GeoIP Ingest plugin (#31669) --- docs/plugins/ingest-geoip.asciidoc | 4 ++-- .../index/mapper/filebeat-6.0.template.json | 8 ++++---- .../ingest/geoip/GeoIpProcessor.java | 18 +++++++++++++++--- .../geoip/GeoIpProcessorFactoryTests.java | 2 +- .../ingest/geoip/GeoIpProcessorTests.java | 3 ++- .../test/ingest_geoip/20_geoip_processor.yml | 11 +++++++---- 6 files changed, 31 insertions(+), 15 deletions(-) diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 32516d07bef..688b36042ea 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -26,14 +26,14 @@ include::install_remove.asciidoc[] | `field` | yes | - | The field to get the ip address from for the geographical lookup. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. | `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb, GeoLite2-Country.mmdb and GeoLite2-ASN.mmdb files. -| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. +| `properties` | no | [`continent_name`, `country_iso_code`, `region_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. 
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== *Depends on what is available in `database_field`: * If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`, -`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` +`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`. * If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties diff --git a/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json b/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json index 0129509b5c0..fbaaf92351c 100644 --- a/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json +++ b/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json @@ -1,7 +1,5 @@ { - "index_patterns": [ - "filebeat-6.0.0-*" - ], + "index_patterns": ["filebeat-6.0.0-*"], "mappings": { "doc": { "_meta": { @@ -67,12 +65,14 @@ "type": "keyword" }, "country_iso_code": { - "ignore_above": 1024, "type": "keyword" }, "location": { "type": "geo_point" }, + "region_iso_code": { + "type": "keyword" + }, "region_name": { "ignore_above": 1024, "type": "keyword" diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 
f1b4b33017e..366b6ffc1d2 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -185,6 +185,16 @@ public final class GeoIpProcessor extends AbstractProcessor { geoData.put("continent_name", continentName); } break; + case REGION_ISO_CODE: + // ISO 3166-2 code for country subdivisions. + // See iso.org/iso-3166-country-codes.html + String countryIso = country.getIsoCode(); + String subdivisionIso = subdivision.getIsoCode(); + if (countryIso != null && subdivisionIso != null) { + String regionIsoCode = countryIso + "-" + subdivisionIso; + geoData.put("region_iso_code", regionIsoCode); + } + break; case REGION_NAME: String subdivisionName = subdivision.getName(); if (subdivisionName != null) { @@ -300,8 +310,8 @@ public final class GeoIpProcessor extends AbstractProcessor { public static final class Factory implements Processor.Factory { static final Set DEFAULT_CITY_PROPERTIES = EnumSet.of( - Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, - Property.CITY_NAME, Property.LOCATION + Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE, + Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION ); static final Set DEFAULT_COUNTRY_PROPERTIES = EnumSet.of( Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE @@ -377,6 +387,7 @@ public final class GeoIpProcessor extends AbstractProcessor { COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, + REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, @@ -386,7 +397,8 @@ public final class GeoIpProcessor extends AbstractProcessor { static final EnumSet ALL_CITY_PROPERTIES = EnumSet.of( Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_NAME, - Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION + Property.REGION_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, + 
Property.LOCATION ); static final EnumSet ALL_COUNTRY_PROPERTIES = EnumSet.of( Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 0aa2eb9fdfa..7a5d6f5808f 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -284,7 +284,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config1.put("properties", Collections.singletonList("invalid")); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config1)); assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + - "COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]")); + "COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]")); Map config2 = new HashMap<>(); config2.put("field", "_field"); diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 48a1769cbf8..4c04d4e340a 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -117,11 +117,12 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address)); @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData.size(), equalTo(8)); + 
assertThat(geoData.size(), equalTo(9)); assertThat(geoData.get("ip"), equalTo(address)); assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); assertThat(geoData.get("continent_name"), equalTo("North America")); + assertThat(geoData.get("region_iso_code"), equalTo("US-FL")); assertThat(geoData.get("region_name"), equalTo("Florida")); assertThat(geoData.get("city_name"), equalTo("Hollywood")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml index 0c400c3c0ea..012ca717318 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml @@ -30,11 +30,12 @@ type: test id: 1 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 5 } + - length: { _source.geoip: 6 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.location.lon: -93.2166 } - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -54,7 +55,7 @@ { "geoip" : { "field" : "field1", - "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"] + "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_iso_code", "region_name", "continent_name"] } } ] @@ -75,7 +76,7 @@ type: test id: 1 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 8 } + - length: { _source.geoip: 9 } - match: { 
_source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.ip: "128.101.101.101" } @@ -83,6 +84,7 @@ - match: { _source.geoip.location.lat: 44.9759 } - match: { _source.geoip.timezone: "America/Chicago" } - match: { _source.geoip.country_name: "United States" } + - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -188,11 +190,12 @@ type: test id: 2 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 5 } + - length: { _source.geoip: 6 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.location.lon: -93.2166 } - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } From e21692e3870a9e4437f9884628bfb90ca19fe3dd Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 20 Jul 2018 21:10:35 +0200 Subject: [PATCH 124/260] INGEST: Make a few Processors callable by Painless (#32170) * INGEST: Make a few Processors callable by Painless * Extracted a few stateless String processors as well as the json processor to static methods and whitelisted them in Painless * provide whitelist from processors plugin --- modules/ingest-common/build.gradle | 6 + .../ingest/common/BytesProcessor.java | 6 +- .../ingest/common/JsonProcessor.java | 38 +-- .../ingest/common/LowercaseProcessor.java | 6 +- .../ingest/common/Processors.java | 49 ++++ .../common/ProcessorsWhitelistExtension.java | 41 ++++ .../ingest/common/URLDecodeProcessor.java | 10 +- .../ingest/common/UppercaseProcessor.java | 6 +- ...asticsearch.painless.spi.PainlessExtension | 1 + .../ingest/common/processors_whitelist.txt | 29 +++ .../ingest/common/BytesProcessorTests.java | 4 +- 
.../test/ingest/190_script_processor.yml | 216 ++++++++++++++++++ .../painless/spi/org.elasticsearch.txt | 2 +- 13 files changed, 391 insertions(+), 23 deletions(-) create mode 100644 modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java create mode 100644 modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java create mode 100644 modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension create mode 100644 modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt create mode 100644 modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index 424c1197da3..4f35bbee28d 100644 --- a/modules/ingest-common/build.gradle +++ b/modules/ingest-common/build.gradle @@ -20,11 +20,17 @@ esplugin { description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources' classname 'org.elasticsearch.ingest.common.IngestCommonPlugin' + extendedPlugins = ['lang-painless'] } dependencies { + compileOnly project(':modules:lang-painless') compile project(':libs:grok') } compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes" compileTestJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes" + +integTestCluster { + module project(':modules:lang-painless') +} \ No newline at end of file diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java index dfe9a054acf..d07b56e1b3d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java @@ -35,9 +35,13 @@ public final 
class BytesProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } + public static long apply(String value) { + return ByteSizeValue.parseBytesSizeValue(value, null, "Ingest Field").getBytes(); + } + @Override protected Long process(String value) { - return ByteSizeValue.parseBytesSizeValue(value, null, getField()).getBytes(); + return apply(value); } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index 2f217735df2..c0a9d37abda 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -67,13 +67,11 @@ public final class JsonProcessor extends AbstractProcessor { return addToRoot; } - @Override - public void execute(IngestDocument document) throws Exception { - Object fieldValue = document.getFieldValue(field, Object.class); - BytesReference bytesRef = (fieldValue == null) ? new BytesArray("null") : new BytesArray(fieldValue.toString()); + public static Object apply(Object fieldValue) { + BytesReference bytesRef = fieldValue == null ? 
new BytesArray("null") : new BytesArray(fieldValue.toString()); try (InputStream stream = bytesRef.streamInput(); XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { XContentParser.Token token = parser.nextToken(); Object value = null; if (token == XContentParser.Token.VALUE_NULL) { @@ -91,20 +89,32 @@ public final class JsonProcessor extends AbstractProcessor { } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { throw new IllegalArgumentException("cannot read binary value"); } - if (addToRoot && (value instanceof Map)) { - for (Map.Entry entry : ((Map) value).entrySet()) { - document.setFieldValue(entry.getKey(), entry.getValue()); - } - } else if (addToRoot) { - throw new IllegalArgumentException("cannot add non-map fields to root of document"); - } else { - document.setFieldValue(targetField, value); - } + return value; } catch (IOException e) { throw new IllegalArgumentException(e); } } + public static void apply(Map ctx, String fieldName) { + Object value = apply(ctx.get(fieldName)); + if (value instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) value; + ctx.putAll(map); + } else { + throw new IllegalArgumentException("cannot add non-map fields to root of document"); + } + } + + @Override + public void execute(IngestDocument document) throws Exception { + if (addToRoot) { + apply(document.getSourceAndMetadata(), field); + } else { + document.setFieldValue(targetField, apply(document.getFieldValue(field, Object.class))); + } + } + @Override public String getType() { return TYPE; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java index aef8b0cce24..4269cb05257 100644 --- 
a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java @@ -35,9 +35,13 @@ public final class LowercaseProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } + public static String apply(String value) { + return value.toLowerCase(Locale.ROOT); + } + @Override protected String process(String value) { - return value.toLowerCase(Locale.ROOT); + return apply(value); } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java new file mode 100644 index 00000000000..8a0b1529892 --- /dev/null +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.common; + +import java.util.Map; + +public final class Processors { + + public static long bytes(String value) { + return BytesProcessor.apply(value); + } + + public static String lowercase(String value) { + return LowercaseProcessor.apply(value); + } + + public static String uppercase(String value) { + return UppercaseProcessor.apply(value); + } + + public static Object json(Object fieldValue) { + return JsonProcessor.apply(fieldValue); + } + + public static void json(Map ctx, String field) { + JsonProcessor.apply(ctx, field); + } + + public static String urlDecode(String value) { + return URLDecodeProcessor.apply(value); + } +} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java new file mode 100644 index 00000000000..ced84057c7a --- /dev/null +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.painless.spi.PainlessExtension; +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.script.IngestScript; +import org.elasticsearch.script.ScriptContext; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class ProcessorsWhitelistExtension implements PainlessExtension { + + private static final Whitelist WHITELIST = + WhitelistLoader.loadFromResourceFiles(ProcessorsWhitelistExtension.class, "processors_whitelist.txt"); + + @Override + public Map, List> getContextWhitelists() { + return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST)); + } +} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java index 945419499ad..fb6c5acf98b 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java @@ -34,15 +34,19 @@ public final class URLDecodeProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } - @Override - protected String process(String value) { + public static String apply(String value) { try { return URLDecoder.decode(value, "UTF-8"); } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("could not URL-decode field[" + getField() + "]", e); + throw new IllegalArgumentException("Could not URL-decode value.", e); } } + @Override + protected String process(String value) { + return apply(value); + } + @Override public String getType() { return TYPE; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java 
b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java index af93f06a8f2..6c428627c7d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java @@ -34,9 +34,13 @@ public final class UppercaseProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } + public static String apply(String value) { + return value.toUpperCase(Locale.ROOT); + } + @Override protected String process(String value) { - return value.toUpperCase(Locale.ROOT); + return apply(value); } @Override diff --git a/modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension b/modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension new file mode 100644 index 00000000000..8a98f034be5 --- /dev/null +++ b/modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension @@ -0,0 +1 @@ +org.elasticsearch.ingest.common.ProcessorsWhitelistExtension \ No newline at end of file diff --git a/modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt b/modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt new file mode 100644 index 00000000000..3d93b19f066 --- /dev/null +++ b/modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt @@ -0,0 +1,29 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# This file contains a whitelist of static processor methods that can be accessed from painless + +class org.elasticsearch.ingest.common.Processors { + long bytes(String) + String lowercase(String) + String uppercase(String) + Object json(Object) + void json(Map, String) + String urlDecode(String) +} \ No newline at end of file diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java index 0da3434adf1..788340a455a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java @@ -63,7 +63,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), - CoreMatchers.equalTo("failed to parse setting [" + fieldName + "] with value [8912pb] as a size in bytes")); + CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes")); assertThat(exception.getCause().getMessage(), CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported")); } @@ -93,6 +93,6 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { 
processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L)); assertWarnings("Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + - "[" + fieldName + "]"); + "[Ingest Field]"); } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml new file mode 100644 index 00000000000..bd55b764a95 --- /dev/null +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml @@ -0,0 +1,216 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test invoke bytes processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.bytes(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "1kb"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "1kb" } + - match: { _source.target_field: 1024 } + +--- +"Test invoke lowercase processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.lowercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "FooBar"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "FooBar" } + - match: { _source.target_field: "foobar" } + +--- +"Test invoke uppercase processor": + - do: + ingest.put_pipeline: + id: 
"my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "FooBar"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "FooBar" } + - match: { _source.target_field: "FOOBAR" } + +--- +"Test invoke json processor, assign to field": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.json(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "{\"foo\":\"bar\"}"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "{\"foo\":\"bar\"}" } + - match: { _source.target_field.foo: "bar" } + +--- +"Test invoke json processor, assign to root": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "Processors.json(ctx, 'source_field')" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "{\"foo\":\"bar\"}"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "{\"foo\":\"bar\"}" } + - match: { _source.foo: "bar" } + +--- +"Test invoke urlDecode processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.urlDecode(ctx.source_field)" + } + } + ] + } + - match: { 
acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "foo%20bar"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "foo%20bar" } + - match: { _source.target_field: "foo bar" } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 6495659d9cd..8491d15c27e 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -174,4 +174,4 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Term { class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc { int getLength() float getFreq() -} +} \ No newline at end of file From 7aa8a0a92787b116edd9044c131115e68459d932 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 20 Jul 2018 22:32:50 +0200 Subject: [PATCH 125/260] INGEST: Extend KV Processor (#31789) (#32232) * INGEST: Extend KV Processor (#31789) Added more capabilities supported by LS to the KV processor: * Stripping of brackets and quotes from values (`include_brackets` in corresponding LS filter) * Adding key prefixes * Trimming specified chars from keys and values Refactored the way the filter is configured to avoid conditionals during execution. Refactored Tests a little to not have to add more redundant getters for new parameters. 
Relates #31786 * Add documentation --- docs/reference/ingest/ingest-node.asciidoc | 4 + .../ingest/common/KeyValueProcessor.java | 131 ++++++++++++++---- .../ingest/common/KeyValueProcessorTests.java | 116 ++++++++++++++-- 3 files changed, 214 insertions(+), 37 deletions(-) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 3991b738e13..79277d22e81 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1732,6 +1732,10 @@ For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED` | `include_keys` | no | `null` | List of keys to filter and insert into document. Defaults to including all keys | `exclude_keys` | no | `null` | List of keys to exclude from document | `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document +| `prefix` | no | `null` | Prefix to be added to extracted keys +| `trim_key` | no | `null` | String of characters to trim from extracted keys +| `trim_value` | no | `null` | String of characters to trim from extracted values +| `strip_brackets` | no | `false` | If `true` strip brackets `()`, `<>`, `[]` as well as quotes `'` and `"` from extracted values |====== diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java index 6ed065926d6..9cce3cedf3d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -25,11 +25,14 @@ import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import java.util.Arrays; import java.util.Collections; import java.util.List; import 
java.util.Map; import java.util.Set; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.regex.Pattern; /** * The KeyValueProcessor parses and extracts messages of the `key=value` variety into fields with values of the keys. @@ -38,6 +41,8 @@ public final class KeyValueProcessor extends AbstractProcessor { public static final String TYPE = "kv"; + private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)"); + private final String field; private final String fieldSplit; private final String valueSplit; @@ -45,9 +50,11 @@ public final class KeyValueProcessor extends AbstractProcessor { private final Set excludeKeys; private final String targetField; private final boolean ignoreMissing; + private final Consumer execution; KeyValueProcessor(String tag, String field, String fieldSplit, String valueSplit, Set includeKeys, - Set excludeKeys, String targetField, boolean ignoreMissing) { + Set excludeKeys, String targetField, boolean ignoreMissing, + String trimKey, String trimValue, boolean stripBrackets, String prefix) { super(tag); this.field = field; this.targetField = targetField; @@ -56,6 +63,92 @@ public final class KeyValueProcessor extends AbstractProcessor { this.includeKeys = includeKeys; this.excludeKeys = excludeKeys; this.ignoreMissing = ignoreMissing; + this.execution = buildExecution( + fieldSplit, valueSplit, field, includeKeys, excludeKeys, targetField, ignoreMissing, trimKey, trimValue, + stripBrackets, prefix + ); + } + + private static Consumer buildExecution(String fieldSplit, String valueSplit, String field, + Set includeKeys, Set excludeKeys, + String targetField, boolean ignoreMissing, + String trimKey, String trimValue, boolean stripBrackets, + String prefix) { + final Predicate keyFilter; + if (includeKeys == null) { + if (excludeKeys == null) { + keyFilter = key -> true; + } else { + keyFilter = key -> excludeKeys.contains(key) == 
false; + } + } else { + if (excludeKeys == null) { + keyFilter = includeKeys::contains; + } else { + keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false; + } + } + final String fieldPathPrefix; + String keyPrefix = prefix == null ? "" : prefix; + if (targetField == null) { + fieldPathPrefix = keyPrefix; + } else { + fieldPathPrefix = targetField + "." + keyPrefix; + } + final Function keyPrefixer; + if (fieldPathPrefix.isEmpty()) { + keyPrefixer = val -> val; + } else { + keyPrefixer = val -> fieldPathPrefix + val; + } + final Function fieldSplitter = buildSplitter(fieldSplit, true); + Function valueSplitter = buildSplitter(valueSplit, false); + final Function keyTrimmer = buildTrimmer(trimKey); + final Function bracketStrip; + if (stripBrackets) { + bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll(""); + } else { + bracketStrip = val -> val; + } + final Function valueTrimmer = buildTrimmer(trimValue); + return document -> { + String value = document.getFieldValue(field, String.class, ignoreMissing); + if (value == null) { + if (ignoreMissing) { + return; + } + throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs."); + } + for (String part : fieldSplitter.apply(value)) { + String[] kv = valueSplitter.apply(part); + if (kv.length != 2) { + throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]"); + } + String key = keyTrimmer.apply(kv[0]); + if (keyFilter.test(key)) { + append(document, keyPrefixer.apply(key), valueTrimmer.apply(bracketStrip.apply(kv[1]))); + } + } + }; + } + + private static Function buildTrimmer(String trim) { + if (trim == null) { + return val -> val; + } else { + Pattern pattern = Pattern.compile("(^([" + trim + "]+))|([" + trim + "]+$)"); + return val -> pattern.matcher(val).replaceAll(""); + } + } + + private static Function buildSplitter(String split, boolean fields) { + int limit = fields ? 
0 : 2; + if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') { + Pattern splitPattern = Pattern.compile(split); + return val -> splitPattern.split(val, limit); + } else { + return val -> val.split(split, limit); + } } String getField() { @@ -86,7 +179,7 @@ public final class KeyValueProcessor extends AbstractProcessor { return ignoreMissing; } - public void append(IngestDocument document, String targetField, String value) { + private static void append(IngestDocument document, String targetField, String value) { if (document.hasField(targetField)) { document.appendFieldValue(targetField, value); } else { @@ -96,27 +189,7 @@ public final class KeyValueProcessor extends AbstractProcessor { @Override public void execute(IngestDocument document) { - String oldVal = document.getFieldValue(field, String.class, ignoreMissing); - - if (oldVal == null && ignoreMissing) { - return; - } else if (oldVal == null) { - throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs."); - } - - String fieldPathPrefix = (targetField == null) ? 
"" : targetField + "."; - Arrays.stream(oldVal.split(fieldSplit)) - .map((f) -> { - String[] kv = f.split(valueSplit, 2); - if (kv.length != 2) { - throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]"); - } - return kv; - }) - .filter((p) -> - (includeKeys == null || includeKeys.contains(p[0])) && - (excludeKeys == null || excludeKeys.contains(p[0]) == false)) - .forEach((p) -> append(document, fieldPathPrefix + p[0], p[1])); + execution.accept(document); } @Override @@ -132,6 +205,11 @@ public final class KeyValueProcessor extends AbstractProcessor { String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field"); String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split"); String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split"); + String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key"); + String trimValue = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_value"); + String prefix = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "prefix"); + boolean stripBrackets = + ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false); Set includeKeys = null; Set excludeKeys = null; List includeKeysList = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys"); @@ -143,7 +221,10 @@ public final class KeyValueProcessor extends AbstractProcessor { excludeKeys = Collections.unmodifiableSet(Sets.newHashSet(excludeKeysList)); } boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new KeyValueProcessor(processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing); + return new KeyValueProcessor( + processorTag, field, 
fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, + trimKey, trimValue, stripBrackets, prefix + ); } } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java index 380af44c251..591f9994c60 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java @@ -25,19 +25,25 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; public class KeyValueProcessorTests extends ESTestCase { + private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory(); + public void test() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); @@ -46,7 +52,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void 
testRootTarget() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "myField", "&", "=", null, null,null, false); + Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello")); assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe"))); @@ -55,7 +61,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testKeySameAsSourceField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); ingestDocument.setFieldValue("first", "first=hello"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "first", "&", "=", null, null,null, false); + Processor processor = createKvProcessor("first", "&", "=", null, null,null, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello"))); } @@ -63,7 +69,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testIncludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", + Processor processor = createKvProcessor(fieldName, "&", "=", Sets.newHashSet("first"), null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); @@ -73,7 +79,7 @@ public class 
KeyValueProcessorTests extends ESTestCase { public void testExcludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", + Processor processor = createKvProcessor(fieldName, "&", "=", null, Sets.newHashSet("second"), "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); @@ -84,7 +90,7 @@ public class KeyValueProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe&third=bar"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", + Processor processor = createKvProcessor(fieldName, "&", "=", Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false); processor.execute(ingestDocument); assertFalse(ingestDocument.hasField("target.first")); @@ -92,9 +98,9 @@ public class KeyValueProcessorTests extends ESTestCase { assertFalse(ingestDocument.hasField("target.third")); } - public void testMissingField() { + public void testMissingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "&", + Processor processor = createKvProcessor("unknown", "&", "=", null, null, "target", false); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]")); @@ 
-105,7 +111,7 @@ public class KeyValueProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap(fieldName, null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "", "", null, null, "target", true); + Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -113,7 +119,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testNonExistentWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "", "", null, null, "target", true); + Processor processor = createKvProcessor("unknown", "", "", null, null, "target", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -121,7 +127,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testFailFieldSplitMatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe")); 
assertFalse(ingestDocument.hasField("target.second")); @@ -129,8 +135,94 @@ public class KeyValueProcessorTests extends ESTestCase { public void testFailValueSplitMatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar")); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "foo", "&", "=", null, null, "target", false); + Processor processor = createKvProcessor("foo", "&", "=", null, null, "target", false); Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]")); } + + public void testTrimKeyAndValue() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first= hello &second=world& second =universe"); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, " ", " ", false, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); + } + + public void testTrimMultiCharSequence() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, + "to=, orig_to=, %+relay=mail.example.com[private/dovecot-lmtp]," + + " delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent " + ); + Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.to", String.class), equalTo("foo@example.com")); + 
assertThat(ingestDocument.getFieldValue("target.orig_to", String.class), equalTo("bar@example.com")); + assertThat(ingestDocument.getFieldValue("target.relay", String.class), equalTo("mail.example.com[private/dovecot-lmtp]")); + assertThat(ingestDocument.getFieldValue("target.delay", String.class), equalTo("2.2")); + assertThat(ingestDocument.getFieldValue("target.delays", String.class), equalTo("1.9/0.01/0.01/0.21")); + assertThat(ingestDocument.getFieldValue("target.dsn", String.class), equalTo("2.0.0")); + assertThat(ingestDocument.getFieldValue("target.status", String.class), equalTo("sent")); + } + + public void testStripBrackets() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField( + random(), ingestDocument, "first=&second=\"world\"&second=(universe)&third=&fourth=[bar]&fifth='last'" + ); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); + assertThat(ingestDocument.getFieldValue("target.third", String.class), equalTo("foo")); + assertThat(ingestDocument.getFieldValue("target.fourth", String.class), equalTo("bar")); + assertThat(ingestDocument.getFieldValue("target.fifth", String.class), equalTo("last")); + } + + public void testAddPrefix() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, false, "arg_"); + processor.execute(ingestDocument); + 
assertThat(ingestDocument.getFieldValue("target.arg_first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe"))); + } + + private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set includeKeys, + Set excludeKeys, String targetField, + boolean ignoreMissing) throws Exception { + return createKvProcessor( + field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null + ); + } + + private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set includeKeys, + Set excludeKeys, String targetField, boolean ignoreMissing, + String trimKey, String trimValue, boolean stripBrackets, + String prefix) throws Exception { + Map config = new HashMap<>(); + config.put("field", field); + config.put("field_split", fieldSplit); + config.put("value_split", valueSplit); + config.put("target_field", targetField); + if (includeKeys != null) { + config.put("include_keys", new ArrayList<>(includeKeys)); + } + if (excludeKeys != null) { + config.put("exclude_keys", new ArrayList<>(excludeKeys)); + } + config.put("ignore_missing", ignoreMissing); + if (trimKey != null) { + config.put("trim_key", trimKey); + } + if (trimValue != null) { + config.put("trim_value", trimValue); + } + config.put("strip_brackets", stripBrackets); + if (prefix != null) { + config.put("prefix", prefix); + } + return FACTORY.create(null, randomAlphaOfLength(10), config); + } } From aaa8f842d60f45d6303fb04a9f4fca4ca40dc4ca Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Sat, 21 Jul 2018 00:03:58 +0300 Subject: [PATCH 126/260] Remove BouncyCastle dependency from runtime (#32193) * Remove BouncyCastle dependency from runtime This commit introduces a new gradle project that contains the classes that have a dependency on BouncyCastle. 
For the default distribution, It builds a jar from those and in puts it in a subdirectory of lib (/tools/security-cli) along with the BouncyCastle jars. This directory is then passed in the ES_ADDITIONAL_CLASSPATH_DIRECTORIES of the CLI tools that use these classes. BouncyCastle is removed as a runtime dependency (remains as a compileOnly one) from x-pack core and x-pack security. --- distribution/archives/build.gradle | 2 +- distribution/build.gradle | 28 ++-- distribution/packages/build.gradle | 2 +- .../packaging/test/ArchiveTestCase.java | 22 +++ .../packaging/util/Installation.java | 1 + x-pack/plugin/core/build.gradle | 4 +- .../xpack/core/ssl/CertParsingUtils.java | 10 +- x-pack/plugin/security/build.gradle | 5 +- x-pack/plugin/security/cli/build.gradle | 20 +++ .../licenses/bcpkix-jdk15on-1.59.jar.sha1 | 0 .../licenses/bcprov-jdk15on-1.59.jar.sha1 | 0 .../cli}/licenses/bouncycastle-LICENSE.txt | 0 .../cli}/licenses/bouncycastle-NOTICE.txt | 0 .../xpack/security/cli}/CertGenUtils.java | 22 +-- .../cli}/CertificateGenerateTool.java | 128 ++++++++++-------- .../xpack/security/cli}/CertificateTool.java | 120 ++++++++-------- .../security/cli}/CertGenUtilsTests.java | 3 +- .../cli}/CertificateGenerateToolTests.java | 14 +- .../security/cli}/CertificateToolTests.java | 20 +-- .../xpack/security/cli/testnode.crt | 23 ++++ .../xpack/security/cli/testnode.pem | 30 ++++ .../src/main/bin/elasticsearch-certgen | 3 +- .../src/main/bin/elasticsearch-certgen.bat | 3 +- .../src/main/bin/elasticsearch-certutil | 3 +- .../src/main/bin/elasticsearch-certutil.bat | 3 +- 25 files changed, 294 insertions(+), 172 deletions(-) create mode 100644 x-pack/plugin/security/cli/build.gradle rename x-pack/plugin/{core => security/cli}/licenses/bcpkix-jdk15on-1.59.jar.sha1 (100%) rename x-pack/plugin/{core => security/cli}/licenses/bcprov-jdk15on-1.59.jar.sha1 (100%) rename x-pack/plugin/{core => security/cli}/licenses/bouncycastle-LICENSE.txt (100%) rename x-pack/plugin/{core => 
security/cli}/licenses/bouncycastle-NOTICE.txt (100%) rename x-pack/plugin/{core/src/main/java/org/elasticsearch/xpack/core/ssl => security/cli/src/main/java/org/elasticsearch/xpack/security/cli}/CertGenUtils.java (93%) rename x-pack/plugin/{core/src/main/java/org/elasticsearch/xpack/core/ssl => security/cli/src/main/java/org/elasticsearch/xpack/security/cli}/CertificateGenerateTool.java (90%) rename x-pack/plugin/{core/src/main/java/org/elasticsearch/xpack/core/ssl => security/cli/src/main/java/org/elasticsearch/xpack/security/cli}/CertificateTool.java (92%) rename x-pack/plugin/{core/src/test/java/org/elasticsearch/xpack/core/ssl => security/cli/src/test/java/org/elasticsearch/xpack/security/cli}/CertGenUtilsTests.java (97%) rename x-pack/{qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl => plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli}/CertificateGenerateToolTests.java (98%) rename x-pack/{qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl => plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli}/CertificateToolTests.java (98%) create mode 100644 x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt create mode 100644 x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 3c6780cba84..0269e4399ce 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -49,7 +49,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os return copySpec { into("elasticsearch-${version}") { into('lib') { - with libFiles + with libFiles(oss) } into('config') { dirMode 0750 diff --git a/distribution/build.gradle b/distribution/build.gradle index 6ffb678cb2b..675799c5b22 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -227,16 +227,24 @@ 
configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { /***************************************************************************** * Common files in all distributions * *****************************************************************************/ - libFiles = copySpec { - // delay by using closures, since they have not yet been configured, so no jar task exists yet - from { project(':server').jar } - from { project(':server').configurations.runtime } - from { project(':libs:plugin-classloader').jar } - from { project(':distribution:tools:java-version-checker').jar } - from { project(':distribution:tools:launchers').jar } - into('tools/plugin-cli') { - from { project(':distribution:tools:plugin-cli').jar } - from { project(':distribution:tools:plugin-cli').configurations.runtime } + libFiles = { oss -> + copySpec { + // delay by using closures, since they have not yet been configured, so no jar task exists yet + from { project(':server').jar } + from { project(':server').configurations.runtime } + from { project(':libs:plugin-classloader').jar } + from { project(':distribution:tools:java-version-checker').jar } + from { project(':distribution:tools:launchers').jar } + into('tools/plugin-cli') { + from { project(':distribution:tools:plugin-cli').jar } + from { project(':distribution:tools:plugin-cli').configurations.runtime } + } + if (oss == false) { + into('tools/security-cli') { + from { project(':x-pack:plugin:security:cli').jar } + from { project(':x-pack:plugin:security:cli').configurations.compile } + } + } } } diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 04fa6313c0a..fcd69138da3 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -126,7 +126,7 @@ Closure commonPackageConfig(String type, boolean oss) { } into('lib') { with copySpec { - with libFiles + with libFiles(oss) // we need to specify every intermediate directory so we iterate through the 
parents; duplicate calls with the same part are fine eachFile { FileCopyDetails fcp -> String[] segments = fcp.relativePath.segments diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index 3aada7837d8..20561115542 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -57,6 +57,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.isEmptyString; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeThat; import static org.junit.Assume.assumeTrue; @@ -302,5 +303,26 @@ public abstract class ArchiveTestCase extends PackagingTestCase { } } + public void test90SecurityCliPackaging() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + if (distribution().equals(Distribution.DEFAULT_TAR) || distribution().equals(Distribution.DEFAULT_ZIP)) { + assertTrue(Files.exists(installation.lib.resolve("tools").resolve("security-cli"))); + Platforms.onLinux(() -> { + final Result result = sh.run(bin.elasticsearchCertutil + " help"); + assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack")); + }); + + Platforms.onWindows(() -> { + final Result result = sh.run(bin.elasticsearchCertutil + " help"); + assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack")); + }); + } else if (distribution().equals(Distribution.OSS_TAR) || distribution().equals(Distribution.OSS_ZIP)) { + 
assertFalse(Files.exists(installation.lib.resolve("tools").resolve("security-cli"))); + } + } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index 40dc546f230..8bc3fc6e14d 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -101,6 +101,7 @@ public class Installation { public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin"); public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore"); public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog"); + public final Path elasticsearchCertutil = platformExecutable("elasticsearch-certutil"); private Path platformExecutable(String name) { final String platformExecutableName = Platforms.WINDOWS diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index ca926fa0d54..7ed98ccb5b4 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -20,7 +20,6 @@ esplugin { } dependencyLicenses { - mapping from: /bc.*/, to: 'bouncycastle' mapping from: /http.*/, to: 'httpclient' // pulled in by rest client mapping from: /commons-.*/, to: 'commons' // pulled in by rest client } @@ -38,8 +37,6 @@ dependencies { // security deps compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile 'org.bouncycastle:bcprov-jdk15on:1.59' - compile 'org.bouncycastle:bcpkix-jdk15on:1.59' compile project(path: ':modules:transport-netty4', configuration: 'runtime') testCompile 'org.elasticsearch:securemock:1.2' @@ -116,6 +113,7 @@ task testJar(type: Jar) { appendix 'test' from sourceSets.test.output } + artifacts { // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions archives jar diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 1617a92b550..6503f686b64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -63,7 +63,7 @@ public class CertParsingUtils { return PathUtils.get(path).normalize(); } - static KeyStore readKeyStore(Path path, String type, char[] password) + public static KeyStore readKeyStore(Path path, String type, char[] password) throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException { try (InputStream in = Files.newInputStream(path)) { KeyStore store = KeyStore.getInstance(type); @@ -108,7 +108,7 @@ public class CertParsingUtils { return certificates.toArray(new X509Certificate[0]); } - static List readCertificates(InputStream input) throws CertificateException, IOException { + public static List readCertificates(InputStream input) throws CertificateException, IOException { CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); Collection certificates = (Collection) certFactory.generateCertificates(input); return new ArrayList<>(certificates); @@ -140,7 +140,7 @@ public class CertParsingUtils { /** * Creates a {@link KeyStore} from a PEM encoded certificate and key file */ - static KeyStore getKeyStoreFromPEM(Path certificatePath, Path keyPath, char[] keyPassword) + public static KeyStore getKeyStoreFromPEM(Path certificatePath, Path keyPath, char[] keyPassword) throws IOException, CertificateException, KeyStoreException, NoSuchAlgorithmException { final PrivateKey key = PemUtils.readPrivateKey(keyPath, () -> keyPassword); final Certificate[] certificates = readCertificates(Collections.singletonList(certificatePath)); @@ -168,7 +168,7 @@ public class CertParsingUtils { /** * Returns a {@link 
X509ExtendedKeyManager} that is built from the provided keystore */ - static X509ExtendedKeyManager keyManager(KeyStore keyStore, char[] password, String algorithm) + public static X509ExtendedKeyManager keyManager(KeyStore keyStore, char[] password, String algorithm) throws NoSuchAlgorithmException, UnrecoverableKeyException, KeyStoreException { KeyManagerFactory kmf = KeyManagerFactory.getInstance(algorithm); kmf.init(keyStore, password); @@ -271,7 +271,7 @@ public class CertParsingUtils { /** * Creates a {@link X509ExtendedTrustManager} based on the trust material in the provided {@link KeyStore} */ - static X509ExtendedTrustManager trustManager(KeyStore keyStore, String algorithm) + public static X509ExtendedTrustManager trustManager(KeyStore keyStore, String algorithm) throws NoSuchAlgorithmException, KeyStoreException { TrustManagerFactory tmf = TrustManagerFactory.getInstance(algorithm); tmf.init(keyStore); diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 5bdfdf65014..854b0165954 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -22,8 +22,8 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile 'org.bouncycastle:bcprov-jdk15on:1.59' - compile 'org.bouncycastle:bcpkix-jdk15on:1.59' + compileOnly 'org.bouncycastle:bcprov-jdk15on:1.59' + compileOnly 'org.bouncycastle:bcpkix-jdk15on:1.59' // the following are all SAML dependencies - might as well download the whole internet compile "org.opensaml:opensaml-core:3.3.0" @@ -79,7 +79,6 @@ sourceSets.test.resources { srcDir '../core/src/test/resources' } dependencyLicenses { - mapping from: /bc.*/, to: 'bouncycastle' mapping from: /java-support|opensaml-.*/, to: 'shibboleth' mapping from: /http.*/, to: 'httpclient' } diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle new file mode 100644 
index 00000000000..1799a2c7b81 --- /dev/null +++ b/x-pack/plugin/security/cli/build.gradle @@ -0,0 +1,20 @@ +apply plugin: 'elasticsearch.build' + +archivesBaseName = 'elasticsearch-security-cli' + +dependencies { + compileOnly "org.elasticsearch:elasticsearch:${version}" + compileOnly xpackProject('plugin:core') + compile 'org.bouncycastle:bcprov-jdk15on:1.59' + compile 'org.bouncycastle:bcpkix-jdk15on:1.59' + testImplementation 'com.google.jimfs:jimfs:1.1' + testCompile "junit:junit:${versions.junit}" + testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + testCompile 'org.elasticsearch:securemock:1.2' + testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') +} + +dependencyLicenses { + mapping from: /bc.*/, to: 'bouncycastle' +} \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 b/x-pack/plugin/security/cli/licenses/bcpkix-jdk15on-1.59.jar.sha1 similarity index 100% rename from x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 rename to x-pack/plugin/security/cli/licenses/bcpkix-jdk15on-1.59.jar.sha1 diff --git a/x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 b/x-pack/plugin/security/cli/licenses/bcprov-jdk15on-1.59.jar.sha1 similarity index 100% rename from x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 rename to x-pack/plugin/security/cli/licenses/bcprov-jdk15on-1.59.jar.sha1 diff --git a/x-pack/plugin/core/licenses/bouncycastle-LICENSE.txt b/x-pack/plugin/security/cli/licenses/bouncycastle-LICENSE.txt similarity index 100% rename from x-pack/plugin/core/licenses/bouncycastle-LICENSE.txt rename to x-pack/plugin/security/cli/licenses/bouncycastle-LICENSE.txt diff --git a/x-pack/plugin/core/licenses/bouncycastle-NOTICE.txt b/x-pack/plugin/security/cli/licenses/bouncycastle-NOTICE.txt similarity index 100% rename from x-pack/plugin/core/licenses/bouncycastle-NOTICE.txt rename to 
x-pack/plugin/security/cli/licenses/bouncycastle-NOTICE.txt diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertGenUtils.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java similarity index 93% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertGenUtils.java rename to x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java index 6273456aca2..0b88f3da40a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertGenUtils.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import org.bouncycastle.asn1.ASN1Encodable; import org.bouncycastle.asn1.ASN1ObjectIdentifier; @@ -78,7 +78,7 @@ public class CertGenUtils { * Generates a CA certificate */ public static X509Certificate generateCACertificate(X500Principal x500Principal, KeyPair keyPair, int days) - throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { + throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { return generateSignedCertificate(x500Principal, null, keyPair, null, null, true, days, null); } @@ -100,7 +100,7 @@ public class CertGenUtils { */ public static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, X509Certificate caCert, PrivateKey caPrivKey, int days) - throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { + throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { return 
generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days, null); } @@ -125,7 +125,7 @@ public class CertGenUtils { public static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, X509Certificate caCert, PrivateKey caPrivKey, int days, String signatureAlgorithm) - throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { + throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days, signatureAlgorithm); } @@ -150,7 +150,7 @@ public class CertGenUtils { private static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, X509Certificate caCert, PrivateKey caPrivKey, boolean isCa, int days, String signatureAlgorithm) - throws NoSuchAlgorithmException, CertificateException, CertIOException, OperatorCreationException { + throws NoSuchAlgorithmException, CertificateException, CertIOException, OperatorCreationException { Objects.requireNonNull(keyPair, "Key-Pair must not be null"); final DateTime notBefore = new DateTime(DateTimeZone.UTC); if (days < 1) { @@ -175,8 +175,8 @@ public class CertGenUtils { } JcaX509v3CertificateBuilder builder = - new JcaX509v3CertificateBuilder(issuer, serial, - new Time(notBefore.toDate(), Locale.ROOT), new Time(notAfter.toDate(), Locale.ROOT), subject, keyPair.getPublic()); + new JcaX509v3CertificateBuilder(issuer, serial, + new Time(notBefore.toDate(), Locale.ROOT), new Time(notAfter.toDate(), Locale.ROOT), subject, keyPair.getPublic()); builder.addExtension(Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(keyPair.getPublic())); builder.addExtension(Extension.authorityKeyIdentifier, false, authorityKeyIdentifier); @@ -187,8 +187,8 @@ public class CertGenUtils { PrivateKey 
signingKey = caPrivKey != null ? caPrivKey : keyPair.getPrivate(); ContentSigner signer = new JcaContentSignerBuilder( - (Strings.isNullOrEmpty(signatureAlgorithm)) ? getDefaultSignatureAlgorithm(signingKey) : signatureAlgorithm) - .setProvider(CertGenUtils.BC_PROV).build(signingKey); + (Strings.isNullOrEmpty(signatureAlgorithm)) ? getDefaultSignatureAlgorithm(signingKey) : signatureAlgorithm) + .setProvider(CertGenUtils.BC_PROV).build(signingKey); X509CertificateHolder certificateHolder = builder.build(signer); return new JcaX509CertificateConverter().getCertificate(certificateHolder); } @@ -214,7 +214,7 @@ public class CertGenUtils { break; default: throw new IllegalArgumentException("Unsupported algorithm : " + key.getAlgorithm() - + " for signature, allowed values for private key algorithm are [RSA, DSA, EC]"); + + " for signature, allowed values for private key algorithm are [RSA, DSA, EC]"); } return signatureAlgorithm; } @@ -229,7 +229,7 @@ public class CertGenUtils { * @return a certificate signing request */ static PKCS10CertificationRequest generateCSR(KeyPair keyPair, X500Principal principal, GeneralNames sanList) - throws IOException, OperatorCreationException { + throws IOException, OperatorCreationException { Objects.requireNonNull(keyPair, "Key-Pair must not be null"); Objects.requireNonNull(keyPair.getPublic(), "Public-Key must not be null"); Objects.requireNonNull(principal, "Principal must not be null"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateTool.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateGenerateTool.java similarity index 90% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateTool.java rename to x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateGenerateTool.java index 5515978c3ca..809e4a6d305 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateTool.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateGenerateTool.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import joptsimple.ArgumentAcceptingOptionSpec; import joptsimple.OptionSet; @@ -34,6 +34,8 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import javax.security.auth.x500.X500Principal; @@ -68,6 +70,7 @@ import java.util.zip.ZipOutputStream; /** * CLI tool to make generation of certificates or certificate requests easier for users + * * @deprecated Replaced by {@link CertificateTool} */ @Deprecated @@ -81,7 +84,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { private static final int FILE_EXTENSION_LENGTH = 4; static final int MAX_FILENAME_LENGTH = 255 - FILE_EXTENSION_LENGTH; private static final Pattern ALLOWED_FILENAME_CHAR_PATTERN = - Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); + Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); private static final int DEFAULT_KEY_SIZE = 2048; private static final BouncyCastleProvider BC_PROV = new BouncyCastleProvider(); @@ -96,11 +99,11 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { // of the parser in this class so that we can defer initialization until after logging has been initialized static { @SuppressWarnings("unchecked") final 
ConstructingObjectParser instanceParser = - new ConstructingObjectParser<>( - "instances", - a -> new CertificateInformation( - (String) a[0], (String) (a[1] == null ? a[0] : a[1]), - (List) a[2], (List) a[3], (List) a[4])); + new ConstructingObjectParser<>( + "instances", + a -> new CertificateInformation( + (String) a[0], (String) (a[1] == null ? a[0] : a[1]), + (List) a[2], (List) a[3], (List) a[4])); instanceParser.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name")); instanceParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("filename")); instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("ip")); @@ -125,29 +128,29 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { CertificateGenerateTool() { super(DESCRIPTION); outputPathSpec = parser.accepts("out", "path of the zip file that the output should be written to") - .withRequiredArg(); + .withRequiredArg(); csrSpec = parser.accepts("csr", "only generate certificate signing requests"); caCertPathSpec = parser.accepts("cert", "path to an existing ca certificate").availableUnless(csrSpec).withRequiredArg(); caKeyPathSpec = parser.accepts("key", "path to an existing ca private key") - .availableIf(caCertPathSpec) - .requiredIf(caCertPathSpec) - .withRequiredArg(); + .availableIf(caCertPathSpec) + .requiredIf(caCertPathSpec) + .withRequiredArg(); caPasswordSpec = parser.accepts("pass", "password for an existing ca private key or the generated ca private key") - .availableUnless(csrSpec) - .withOptionalArg(); + .availableUnless(csrSpec) + .withOptionalArg(); caDnSpec = parser.accepts("dn", "distinguished name to use for the generated ca. 
defaults to " + AUTO_GEN_CA_DN) - .availableUnless(caCertPathSpec) - .availableUnless(csrSpec) - .withRequiredArg(); + .availableUnless(caCertPathSpec) + .availableUnless(csrSpec) + .withRequiredArg(); keysizeSpec = parser.accepts("keysize", "size in bits of RSA keys").withRequiredArg().ofType(Integer.class); inputFileSpec = parser.accepts("in", "file containing details of the instances in yaml format").withRequiredArg(); daysSpec = parser.accepts("days", "number of days that the generated certificates are valid") - .availableUnless(csrSpec) - .withRequiredArg() - .ofType(Integer.class); + .availableUnless(csrSpec) + .withRequiredArg() + .ofType(Integer.class); p12Spec = parser.accepts("p12", "output a p12 (PKCS#12) version for each certificate/key pair, with optional password") - .availableUnless(csrSpec) - .withOptionalArg(); + .availableUnless(csrSpec) + .withOptionalArg(); } public static void main(String[] args) throws Exception { @@ -178,7 +181,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { p12Password = null; } CAInfo caInfo = getCAInfo(terminal, dn, caCertPathSpec.value(options), caKeyPathSpec.value(options), keyPass, prompt, env, - keysize, days); + keysize, days); Collection certificateInformations = getCertificateInformationList(terminal, inputFile); generateAndWriteSignedCertificates(outputFile, certificateInformations, caInfo, keysize, days, p12Password); } @@ -197,7 +200,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Checks for output file in the user specified options or prompts the user for the output file * - * @param terminal terminal to communicate with a user + * @param terminal terminal to communicate with a user * @param outputPath user specified output file, may be {@code null} * @return a {@link Path} to the output file */ @@ -223,12 +226,13 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * This method handles the collection of information about 
each instance that is necessary to generate a certificate. The user may * be prompted or the information can be gathered from a file - * @param terminal the terminal to use for user interaction + * + * @param terminal the terminal to use for user interaction * @param inputFile an optional file that will be used to load the instance information * @return a {@link Collection} of {@link CertificateInformation} that represents each instance */ static Collection getCertificateInformationList(Terminal terminal, String inputFile) - throws Exception { + throws Exception { if (inputFile != null) { return parseAndValidateFile(terminal, resolvePath(inputFile).toAbsolutePath()); } @@ -239,7 +243,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { if (name.isEmpty() == false) { final boolean isNameValidFilename = Name.isValidFilename(name); String filename = terminal.readText("Enter name for directories and files " + (isNameValidFilename ? "[" + name + "]" : "") - + ": " ); + + ": "); if (filename.isEmpty() && isNameValidFilename) { filename = name; } @@ -267,7 +271,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { } String exit = terminal.readText("Would you like to specify another instance? 
Press 'y' to continue entering instance " + - "information: "); + "information: "); if ("y".equals(exit) == false) { done = true; } @@ -283,7 +287,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { if (errors.size() > 0) { hasError = true; terminal.println(Terminal.Verbosity.SILENT, "Configuration for instance " + certInfo.name.originalName - + " has invalid details"); + + " has invalid details"); for (String message : errors) { terminal.println(Terminal.Verbosity.SILENT, " * " + message); } @@ -298,6 +302,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Parses the input file to retrieve the certificate information + * * @param file the file to parse * @return a collection of certificate information */ @@ -305,22 +310,23 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { try (Reader reader = Files.newBufferedReader(file)) { // EMPTY is safe here because we never use namedObject XContentParser xContentParser = XContentType.YAML.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, reader); + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, reader); return InputFileParser.PARSER.parse(xContentParser, new ArrayList<>(), null); } } /** * Generates certificate signing requests and writes them out to the specified file in zip format + * * @param outputFile the file to write the output to. 
This file must not already exist - * @param certInfo the details to use in the certificate signing requests + * @param certInfo the details to use in the certificate signing requests */ static void generateAndWriteCsrs(Path outputFile, Collection certInfo, int keysize) throws Exception { fullyWriteFile(outputFile, (outputStream, pemWriter) -> { for (CertificateInformation certificateInformation : certInfo) { KeyPair keyPair = CertGenUtils.generateKeyPair(keysize); GeneralNames sanList = getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, - certificateInformation.commonNames); + certificateInformation.commonNames); PKCS10CertificationRequest csr = CertGenUtils.generateCSR(keyPair, certificateInformation.name.x500Principal, sanList); final String dirName = certificateInformation.name.filename + "/"; @@ -347,15 +353,15 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { * Returns the CA certificate and private key that will be used to sign certificates. These may be specified by the user or * automatically generated * - * @param terminal the terminal to use for prompting the user - * @param dn the distinguished name to use for the CA + * @param terminal the terminal to use for prompting the user + * @param dn the distinguished name to use for the CA * @param caCertPath the path to the CA certificate or {@code null} if not provided - * @param caKeyPath the path to the CA private key or {@code null} if not provided - * @param prompt whether we should prompt the user for a password - * @param keyPass the password to the private key. 
If not present and the key is encrypted the user will be prompted - * @param env the environment for this tool to resolve files with - * @param keysize the size of the key in bits - * @param days the number of days that the certificate should be valid for + * @param caKeyPath the path to the CA private key or {@code null} if not provided + * @param prompt whether we should prompt the user for a password + * @param keyPass the password to the private key. If not present and the key is encrypted the user will be prompted + * @param env the environment for this tool to resolve files with + * @param keysize the size of the key in bits + * @param days the number of days that the certificate should be valid for * @return CA cert and private key */ static CAInfo getCAInfo(Terminal terminal, String dn, String caCertPath, String caKeyPath, char[] keyPass, boolean prompt, @@ -366,7 +372,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { Certificate[] certificates = CertParsingUtils.readCertificates(Collections.singletonList(resolvedCaCertPath), env); if (certificates.length != 1) { throw new IllegalArgumentException("expected a single certificate in file [" + caCertPath + "] but found [" + - certificates.length + "]"); + certificates.length + "]"); } Certificate caCert = certificates[0]; PrivateKey privateKey = readPrivateKey(caKeyPath, keyPass, terminal, prompt); @@ -388,11 +394,12 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Generates signed certificates in PEM format stored in a zip file - * @param outputFile the file that the certificates will be written to. This file must not exist + * + * @param outputFile the file that the certificates will be written to. 
This file must not exist * @param certificateInformations details for creation of the certificates - * @param caInfo the CA information to sign the certificates with - * @param keysize the size of the key in bits - * @param days the number of days that the certificate should be valid for + * @param caInfo the CA information to sign the certificates with + * @param keysize the size of the key in bits + * @param days the number of days that the certificate should be valid for */ static void generateAndWriteSignedCertificates(Path outputFile, Collection certificateInformations, CAInfo caInfo, int keysize, int days, char[] pkcs12Password) throws Exception { @@ -403,9 +410,9 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { for (CertificateInformation certificateInformation : certificateInformations) { KeyPair keyPair = CertGenUtils.generateKeyPair(keysize); Certificate certificate = CertGenUtils.generateSignedCertificate(certificateInformation.name.x500Principal, - getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, - certificateInformation.commonNames), - keyPair, caInfo.caCert, caInfo.privateKey, days); + getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, + certificateInformation.commonNames), + keyPair, caInfo.caCert, caInfo.privateKey, days); final String dirName = certificateInformation.name.filename + "/"; ZipEntry zipEntry = new ZipEntry(dirName); @@ -429,7 +436,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { final KeyStore pkcs12 = KeyStore.getInstance("PKCS12"); pkcs12.load(null); pkcs12.setKeyEntry(certificateInformation.name.originalName, keyPair.getPrivate(), pkcs12Password, - new Certificate[]{certificate}); + new Certificate[]{certificate}); outputStream.putNextEntry(new ZipEntry(entryBase + ".p12")); pkcs12.store(outputStream, pkcs12Password); @@ -441,7 +448,8 @@ public class CertificateGenerateTool 
extends EnvironmentAwareCommand { /** * This method handles the deletion of a file in the case of a partial write - * @param file the file that is being written to + * + * @param file the file that is being written to * @param writer writes the contents of the file */ private static void fullyWriteFile(Path file, Writer writer) throws Exception { @@ -468,9 +476,10 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * This method handles writing out the certificate authority cert and private key if the certificate authority was generated by * this invocation of the tool + * * @param outputStream the output stream to write to - * @param pemWriter the writer for PEM objects - * @param info the certificate authority information + * @param pemWriter the writer for PEM objects + * @param info the certificate authority information */ private static void writeCAInfoIfGenerated(ZipOutputStream outputStream, JcaPEMWriter pemWriter, CAInfo info) throws Exception { if (info.generated) { @@ -577,14 +586,15 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Helper method to read a private key and support prompting of user for a key. To avoid passwords being placed as an argument we * can prompt the user for their password if we encounter an encrypted key. 
- * @param path the path to the private key + * + * @param path the path to the private key * @param password the password provided by the user or {@code null} * @param terminal the terminal to use for user interaction - * @param prompt whether to prompt the user or not + * @param prompt whether to prompt the user or not * @return the {@link PrivateKey} that was read from the file */ private static PrivateKey readPrivateKey(String path, char[] password, Terminal terminal, boolean prompt) - throws Exception { + throws Exception { AtomicReference passwordReference = new AtomicReference<>(password); try { return PemUtils.readPrivateKey(resolvePath(path), () -> { @@ -682,7 +692,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { } } catch (IllegalArgumentException e) { String error = "[" + name + "] could not be converted to a valid DN\n" + e.getMessage() + "\n" - + ExceptionsHelper.stackTrace(e); + + ExceptionsHelper.stackTrace(e); return new Name(name, null, null, error); } @@ -695,15 +705,15 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { static boolean isValidFilename(String name) { return ALLOWED_FILENAME_CHAR_PATTERN.matcher(name).matches() - && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() - && name.startsWith(".") == false; + && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() + && name.startsWith(".") == false; } @Override public String toString() { return getClass().getSimpleName() - + "{original=[" + originalName + "] principal=[" + x500Principal - + "] file=[" + filename + "] err=[" + error + "]}"; + + "{original=[" + originalName + "] principal=[" + x500Principal + + "] file=[" + filename + "] err=[" + error + "]}"; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateTool.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java similarity index 92% rename 
from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateTool.java rename to x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java index dd90df4dd6a..a966cac9109 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateTool.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import joptsimple.OptionParser; import joptsimple.OptionSet; @@ -39,6 +39,8 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import javax.security.auth.x500.X500Principal; @@ -101,7 +103,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { private static final int FILE_EXTENSION_LENGTH = 4; static final int MAX_FILENAME_LENGTH = 255 - FILE_EXTENSION_LENGTH; private static final Pattern ALLOWED_FILENAME_CHAR_PATTERN = - Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); + Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); private static final int DEFAULT_KEY_SIZE = 2048; /** @@ -115,11 +117,11 @@ public class CertificateTool extends LoggingAwareMultiCommand { // of the parser in this class so that we can defer initialization until after logging has been initialized static { @SuppressWarnings("unchecked") final ConstructingObjectParser instanceParser = - new ConstructingObjectParser<>( - "instances", - a -> new 
CertificateInformation( - (String) a[0], (String) (a[1] == null ? a[0] : a[1]), - (List) a[2], (List) a[3], (List) a[4])); + new ConstructingObjectParser<>( + "instances", + a -> new CertificateInformation( + (String) a[0], (String) (a[1] == null ? a[0] : a[1]), + (List) a[2], (List) a[3], (List) a[4])); instanceParser.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name")); instanceParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("filename")); instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("ip")); @@ -144,28 +146,28 @@ public class CertificateTool extends LoggingAwareMultiCommand { static final String INTRO_TEXT = "This tool assists you in the generation of X.509 certificates and certificate\n" + - "signing requests for use with SSL/TLS in the Elastic stack."; + "signing requests for use with SSL/TLS in the Elastic stack."; static final String INSTANCE_EXPLANATION = - " * An instance is any piece of the Elastic Stack that requires a SSL certificate.\n" + - " Depending on your configuration, Elasticsearch, Logstash, Kibana, and Beats\n" + - " may all require a certificate and private key.\n" + - " * The minimum required value for each instance is a name. This can simply be the\n" + - " hostname, which will be used as the Common Name of the certificate. A full\n" + - " distinguished name may also be used.\n" + - " * A filename value may be required for each instance. This is necessary when the\n" + - " name would result in an invalid file or directory name. The name provided here\n" + - " is used as the directory name (within the zip) and the prefix for the key and\n" + - " certificate files. The filename is required if you are prompted and the name\n" + - " is not displayed in the prompt.\n" + - " * IP addresses and DNS names are optional. Multiple values can be specified as a\n" + - " comma separated string. 
If no IP addresses or DNS names are provided, you may\n" + - " disable hostname verification in your SSL configuration."; + " * An instance is any piece of the Elastic Stack that requires a SSL certificate.\n" + + " Depending on your configuration, Elasticsearch, Logstash, Kibana, and Beats\n" + + " may all require a certificate and private key.\n" + + " * The minimum required value for each instance is a name. This can simply be the\n" + + " hostname, which will be used as the Common Name of the certificate. A full\n" + + " distinguished name may also be used.\n" + + " * A filename value may be required for each instance. This is necessary when the\n" + + " name would result in an invalid file or directory name. The name provided here\n" + + " is used as the directory name (within the zip) and the prefix for the key and\n" + + " certificate files. The filename is required if you are prompted and the name\n" + + " is not displayed in the prompt.\n" + + " * IP addresses and DNS names are optional. Multiple values can be specified as a\n" + + " comma separated string. 
If no IP addresses or DNS names are provided, you may\n" + + " disable hostname verification in your SSL configuration."; static final String CA_EXPLANATION = - " * All certificates generated by this tool will be signed by a certificate authority (CA).\n" + - " * The tool can automatically generate a new CA for you, or you can provide your own with the\n" + - " -ca or -ca-cert command line options."; + " * All certificates generated by this tool will be signed by a certificate authority (CA).\n" + + " * The tool can automatically generate a new CA for you, or you can provide your own with the\n" + + " -ca or -ca-cert command line options."; abstract static class CertificateCommand extends EnvironmentAwareCommand { @@ -202,32 +204,32 @@ public class CertificateTool extends LoggingAwareMultiCommand { final void acceptCertificateGenerationOptions() { pemFormatSpec = parser.accepts("pem", "output certificates and keys in PEM format instead of PKCS#12"); daysSpec = parser.accepts("days", "number of days that the generated certificates are valid") - .withRequiredArg().ofType(Integer.class); + .withRequiredArg().ofType(Integer.class); } final void acceptsCertificateAuthority() { caPkcs12PathSpec = parser.accepts("ca", "path to an existing ca key pair (in PKCS#12 format)").withRequiredArg(); caCertPathSpec = parser.accepts("ca-cert", "path to an existing ca certificate") - .availableUnless(caPkcs12PathSpec) - .withRequiredArg(); + .availableUnless(caPkcs12PathSpec) + .withRequiredArg(); caKeyPathSpec = parser.accepts("ca-key", "path to an existing ca private key") - .availableIf(caCertPathSpec) - .requiredIf(caCertPathSpec) - .withRequiredArg(); + .availableIf(caCertPathSpec) + .requiredIf(caCertPathSpec) + .withRequiredArg(); keepCaKeySpec = parser.accepts("keep-ca-key", "retain the CA private key for future use") - .availableUnless(caPkcs12PathSpec) - .availableUnless(caCertPathSpec); + .availableUnless(caPkcs12PathSpec) + .availableUnless(caCertPathSpec); caPasswordSpec 
= parser.accepts("ca-pass", "password for an existing ca private key or the generated ca private key") - .withOptionalArg(); + .withOptionalArg(); acceptsCertificateAuthorityName(); } void acceptsCertificateAuthorityName() { OptionSpecBuilder builder = parser.accepts("ca-dn", - "distinguished name to use for the generated ca. defaults to " + AUTO_GEN_CA_DN); + "distinguished name to use for the generated ca. defaults to " + AUTO_GEN_CA_DN); if (caPkcs12PathSpec != null) { builder = builder.availableUnless(caPkcs12PathSpec); } @@ -336,11 +338,11 @@ public class CertificateTool extends LoggingAwareMultiCommand { char[] passwordOption = getChars(caPasswordSpec.value(options)); Map keys = withPassword("CA (" + path + ")", passwordOption, - terminal, password -> CertParsingUtils.readPkcs12KeyPairs(path, password, a -> password)); + terminal, password -> CertParsingUtils.readPkcs12KeyPairs(path, password, a -> password)); if (keys.size() != 1) { throw new IllegalArgumentException("expected a single key in file [" + path.toAbsolutePath() + "] but found [" + - keys.size() + "]"); + keys.size() + "]"); } final Map.Entry pair = keys.entrySet().iterator().next(); return new CAInfo((X509Certificate) pair.getKey(), (PrivateKey) pair.getValue()); @@ -358,7 +360,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { Certificate[] certificates = CertParsingUtils.readCertificates(Collections.singletonList(resolvedCaCertPath), env); if (certificates.length != 1) { throw new IllegalArgumentException("expected a single certificate in file [" + resolvedCaCertPath + "] but found [" + - certificates.length + "]"); + certificates.length + "]"); } X509Certificate caCert = (X509Certificate) certificates[0]; PrivateKey privateKey = readPrivateKey(key, getChars(password), terminal); @@ -391,7 +393,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { * @return a {@link Collection} of {@link CertificateInformation} that represents each instance */ Collection 
getCertificateInformationList(Terminal terminal, OptionSet options) - throws Exception { + throws Exception { final Path input = resolvePath(options, inputFileSpec); if (input != null) { return parseAndValidateFile(terminal, input.toAbsolutePath()); @@ -456,7 +458,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { } String exit = terminal.readText("Would you like to specify another instance? Press 'y' to continue entering instance " + - "information: "); + "information: "); if ("y".equals(exit) == false) { done = true; } @@ -468,7 +470,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { final boolean isNameValidFilename = Name.isValidFilename(certName); while (true) { String filename = terminal.readText("Enter name for directories and files of " + certName + - (isNameValidFilename ? " [" + certName + "]" : "") + ": "); + (isNameValidFilename ? " [" + certName + "]" : "") + ": "); if (filename.isEmpty() && isNameValidFilename) { return certName; } @@ -490,7 +492,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { * @param includeKey if true, write the CA key in PEM format */ static void writeCAInfo(ZipOutputStream outputStream, JcaPEMWriter pemWriter, CAInfo info, boolean includeKey) - throws Exception { + throws Exception { final String caDirName = createCaDirectory(outputStream); outputStream.putNextEntry(new ZipEntry(caDirName + "ca.crt")); pemWriter.writeObject(info.certAndKey.cert); @@ -546,7 +548,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { pkcs12.load(null); withPassword(fileName, password, terminal, p12Password -> { if (isAscii(p12Password)) { - pkcs12.setKeyEntry(alias, pair.key, p12Password, new Certificate[] { pair.cert }); + pkcs12.setKeyEntry(alias, pair.key, p12Password, new Certificate[]{pair.cert}); if (caCert != null) { pkcs12.setCertificateEntry("ca", caCert); } @@ -574,7 +576,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { terminal.println("The 'csr' 
mode generates certificate signing requests that can be sent to"); terminal.println("a trusted certificate authority"); terminal.println(" * By default, this generates a single CSR for a single instance."); - terminal.println(" * You can use the '-multiple' option to generate CSRs for multiple" ); + terminal.println(" * You can use the '-multiple' option to generate CSRs for multiple"); terminal.println(" instances, each with their own private key."); terminal.println(" * The '-in' option allows for the CSR generation to be automated"); terminal.println(" by describing the details of each instance in a YAML file"); @@ -616,7 +618,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { for (CertificateInformation certificateInformation : certInfo) { KeyPair keyPair = CertGenUtils.generateKeyPair(keySize); GeneralNames sanList = getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, - certificateInformation.dnsNames, certificateInformation.commonNames); + certificateInformation.dnsNames, certificateInformation.commonNames); PKCS10CertificationRequest csr = CertGenUtils.generateCSR(keyPair, certificateInformation.name.x500Principal, sanList); final String dirName = certificateInformation.name.filename + "/"; @@ -750,7 +752,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { */ void generateAndWriteSignedCertificates(Path output, boolean writeZipFile, OptionSet options, Collection certs, CAInfo caInfo, Terminal terminal) - throws Exception { + throws Exception { checkDirectory(output, terminal); @@ -805,7 +807,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { final String fileName = entryBase + ".p12"; outputStream.putNextEntry(new ZipEntry(fileName)); writePkcs12(fileName, outputStream, certificateInformation.name.originalName, pair, caInfo.certAndKey.cert, - outputPassword, terminal); + outputPassword, terminal); outputStream.closeEntry(); } } @@ -815,7 +817,7 @@ public class CertificateTool extends 
LoggingAwareMultiCommand { CertificateInformation certificateInformation = certs.iterator().next(); CertificateAndKey pair = generateCertificateAndKey(certificateInformation, caInfo, keySize, days); fullyWriteFile(output, stream -> writePkcs12(output.getFileName().toString(), stream, - certificateInformation.name.originalName, pair, caInfo.certAndKey.cert, outputPassword, terminal)); + certificateInformation.name.originalName, pair, caInfo.certAndKey.cert, outputPassword, terminal)); } } @@ -823,9 +825,9 @@ public class CertificateTool extends LoggingAwareMultiCommand { int keySize, int days) throws Exception { KeyPair keyPair = CertGenUtils.generateKeyPair(keySize); Certificate certificate = CertGenUtils.generateSignedCertificate(certificateInformation.name.x500Principal, - getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, - certificateInformation.commonNames), - keyPair, caInfo.certAndKey.cert, caInfo.certAndKey.key, days); + getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, + certificateInformation.commonNames), + keyPair, caInfo.certAndKey.cert, caInfo.certAndKey.key, days); return new CertificateAndKey((X509Certificate) certificate, keyPair.getPrivate()); } @@ -872,7 +874,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { } else { final String fileName = output.getFileName().toString(); fullyWriteFile(output, outputStream -> - writePkcs12(fileName, outputStream, "ca", caInfo.certAndKey, null, caInfo.password, terminal)); + writePkcs12(fileName, outputStream, "ca", caInfo.certAndKey, null, caInfo.password, terminal)); } } } @@ -912,7 +914,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { try (Reader reader = Files.newBufferedReader(file)) { // EMPTY is safe here because we never use namedObject XContentParser xContentParser = XContentType.YAML.xContent() - .createParser(NamedXContentRegistry.EMPTY, 
LoggingDeprecationHandler.INSTANCE, reader); + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, reader); return CertificateToolParser.PARSER.parse(xContentParser, new ArrayList<>(), null); } } @@ -1015,7 +1017,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { * @return the {@link PrivateKey} that was read from the file */ private static PrivateKey readPrivateKey(Path path, char[] password, Terminal terminal) - throws Exception { + throws Exception { AtomicReference passwordReference = new AtomicReference<>(password); try { return PemUtils.readPrivateKey(path, () -> { @@ -1125,7 +1127,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { } } catch (IllegalArgumentException e) { String error = "[" + name + "] could not be converted to a valid DN\n" + e.getMessage() + "\n" - + ExceptionsHelper.stackTrace(e); + + ExceptionsHelper.stackTrace(e); return new Name(name, null, null, error); } @@ -1138,15 +1140,15 @@ public class CertificateTool extends LoggingAwareMultiCommand { static boolean isValidFilename(String name) { return ALLOWED_FILENAME_CHAR_PATTERN.matcher(name).matches() - && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() - && name.startsWith(".") == false; + && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() + && name.startsWith(".") == false; } @Override public String toString() { return getClass().getSimpleName() - + "{original=[" + originalName + "] principal=[" + x500Principal - + "] file=[" + filename + "] err=[" + error + "]}"; + + "{original=[" + originalName + "] principal=[" + x500Principal + + "] file=[" + filename + "] err=[" + error + "]}"; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java similarity index 97% rename from 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java rename to x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java index 20259144b42..bb1ed014b9c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import org.bouncycastle.asn1.x509.GeneralName; import org.bouncycastle.asn1.x509.GeneralNames; @@ -12,6 +12,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.cli.CertGenUtils; import org.junit.BeforeClass; import java.math.BigInteger; diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java similarity index 98% rename from x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java rename to x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java index dde0b7645df..91fd55933c5 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; @@ -33,9 +33,11 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CAInfo; -import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CertificateInformation; -import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.Name; +import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.CAInfo; +import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.CertificateInformation; +import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.Name; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.hamcrest.Matchers; import org.junit.After; import org.junit.BeforeClass; @@ -359,8 +361,8 @@ public class CertificateGenerateToolTests extends ESTestCase { public void testGetCAInfo() throws Exception { Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt"); + Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem"); final boolean passwordPrompt = randomBoolean(); MockTerminal terminal = new MockTerminal(); if (passwordPrompt) { diff --git 
a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java similarity index 98% rename from x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java rename to x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java index 706d5dbab5f..9e970ea559a 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; @@ -39,12 +39,14 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.TestMatchers; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CAInfo; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateAuthorityCommand; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateCommand; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateInformation; -import org.elasticsearch.xpack.core.ssl.CertificateTool.GenerateCertificateCommand; -import org.elasticsearch.xpack.core.ssl.CertificateTool.Name; +import org.elasticsearch.xpack.security.cli.CertificateTool.CAInfo; +import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateAuthorityCommand; +import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateCommand; +import 
org.elasticsearch.xpack.security.cli.CertificateTool.CertificateInformation; +import org.elasticsearch.xpack.security.cli.CertificateTool.GenerateCertificateCommand; +import org.elasticsearch.xpack.security.cli.CertificateTool.Name; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.hamcrest.Matchers; import org.junit.After; import org.junit.BeforeClass; @@ -387,8 +389,8 @@ public class CertificateToolTests extends ESTestCase { public void testGetCAInfo() throws Exception { Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt"); + Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem"); final boolean passwordPrompt = randomBoolean(); MockTerminal terminal = new MockTerminal(); if (passwordPrompt) { diff --git a/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt new file mode 100644 index 00000000000..08c160bcea5 --- /dev/null +++ b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID0zCCArugAwIBAgIJALi5bDfjMszLMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTUwOTIzMTg1MjU3WhcNMTkwOTIyMTg1MjU3 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC 
+AQ8AMIIBCgKCAQEA3rGZ1QbsW0+MuyrSLmMfDFKtLBkIFW8V0gRuurFg1PUKKNR1 +Mq2tMVwjjYETAU/UY0iKZOzjgvYPKhDTYBTte/WHR1ZK4CYVv7TQX/gtFQG/ge/c +7u0sLch9p7fbd+/HZiLS/rBEZDIohvgUvzvnA8+OIYnw4kuxKo/5iboAIS41klMg +/lATm8V71LMY68inht71/ZkQoAHKgcR9z4yNYvQ1WqKG8DG8KROXltll3sTrKbl5 +zJhn660es/1ZnR6nvwt6xnSTl/mNHMjkfv1bs4rJ/py3qPxicdoSIn/KyojUcgHV +F38fuAy2CQTdjVG5fWj9iz+mQvLm3+qsIYQdFwIDAQABo4G/MIG8MAkGA1UdEwQC +MAAwHQYDVR0OBBYEFEMMWLWQi/g83PzlHYqAVnty5L7HMIGPBgNVHREEgYcwgYSC +CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds +b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s +b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL +BQADggEBAMjGGXT8Nt1tbl2GkiKtmiuGE2Ej66YuZ37WSJViaRNDVHLlg87TCcHe +k2rdO+6sFqQbbzEfwQ05T7xGmVu7tm54HwKMRugoQ3wct0bQC5wEWYN+oMDvSyO6 +M28mZwWb4VtR2IRyWP+ve5DHwTM9mxWa6rBlGzsQqH6YkJpZojzqk/mQTug+Y8aE +mVoqRIPMHq9ob+S9qd5lp09+MtYpwPfTPx/NN+xMEooXWW/ARfpGhWPkg/FuCu4z +1tFmCqHgNcWirzMm3dQpF78muE9ng6OB2MXQwL4VgnVkxmlZNHbkR2v/t8MyZJxC +y4g6cTMM3S/UMt5/+aIB2JAuMKyuD+A= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem new file mode 100644 index 00000000000..5a67e103344 --- /dev/null +++ b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,9D867F7E0C94D013 + +dVoVCjPeg1wgS7rVtOvGfQcrZyLkx393aWRnFq45tbjKBVuITtJ9vI7o4QXOV/15 +Gnb6WhXGIdWrzsxEAd46K6hIuNSISd4Emsx6c2Q5hTqWXXfexbOZBNfTtXtdJPnJ +1jAaikhtztLo3JSLTKNY5sNxd+XbaQyYVUWvueK6zOaIIMETvB+VPVFd9i1ROibk +Sgdtyj01KjkoalifqK/tA0CIYNKL0S6/eoK3UhAlpIprlpV+cnXa940C6bjLeJPt +PMAGGp5RrplxSgrSerw3I9DOWkHGtpqzIka3XneNUXJP8k4HUJ+aZkGH2ZILKS8d +4KMIb+KZSpHEGn+6uGccWLtZZmAjWJrDw56JbQtSHdRYLBRSOjLbTvQoPu/2Hpli 
+7HOxbotlvjptMunncq5aqK57SHA1dh0cwF7J3LUmGFJ67eoz+VV3b5qMn4MopSeI +mS16Ydd3nGpjSrln/elM0CQxqWfcOAXRZpDpFUQoXcBrLVzvz2DBl/0CrTRLhgzi +CO+5/IVcBWRlYpRNGgjjP7q0j6URID3jk5J06fYQXmBiwQT5j+GZqqzpMCJ9mIy2 +1O9SN1hebJnIcEU+E0njn/MGjlYdPywhaCy8pqElp6Q8TUEJpwLRFO/owCoBet/n +ZmCXUjfCGhc1pWHufFcDEQ6xMgEWWY/tdwCZeSU7EhErTjCbfupg+55A5fpDml0m +3wH4CFcuRjlqyx6Ywixm1ATeitDtJl5HQTw6b8OtEXwSgRmZ0eSqSRVk9QbVS7gu +IpQe09/Zimb5HzjZqZ3fdqHlcW4xax8hyJeyIvF5ZJ57eY8CBvu/wP2GDn26QnvF +xQqdfDbq1H4JmpwUHpbFwBoQK4Q6WFd1z4EA9bRQeo3H9PoqoOwMDjzajwLRF7b7 +q6tYH/n9PyHwdf1c4fFwgSmL1toXGfKlA9hjIaLsRSDD6srT5EdUk78bsnddwI51 +tu7C7P4JG+h1VdRNMNTlqtileWsIE7Nn2A1OkcUxZdF5mamENpDpJcHePLto6c8q +FKiwyFMsxhgsj6HK2HqO+UA4sX5Ni4oHwiPmb//EZLn045M5i1AN26KosJmb8++D +sgR5reWRy+UqJCTYblVg+7Dx++ggUnfxVyQEsWmw5r5f4KU5wXBkvoVMGtPNa9DE +n/uLtObD1qkNL38pRsr2OGRchYCgEoKGqEISBP4knfGXLOlWiW/246j9QzI97r1u +tvy7fKg28G7AUz9l6bpewsPHefBUeRQeieP9eJINaEpxkF/w2RpKDLpQjWxwDDOM +s+D0mrBMJve17AmJ8rMw6dIQPZYNZ88/jz1uQuUwQ2YlbmtZbCG81k9YMFGEU9XS +cyhJxj8hvYnt2PR5Z9/cJPyWOs0m/ufOeeQQ8SnU/lzmrQnpzUd2Z6p5i/B7LdRP +n1kX+l1qynuPnjvBz4nJQE0p6nzW8RyCDSniC9mtYtZmhgC8icqxgbvS7uEOBIYJ +NbK+0bEETTO34iY/JVTIqLOw3iQZYMeUpxpj6Phgx/oooxMTquMecPKNgeVtaBst +qjTNPX0ti1/HYpZqzYi8SV8YjHSJWCVMsZjKPr3W/HIcCKqYoIfgzi83Ha2KMQx6 +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen index 4a192ac3b16..8e88e845e02 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen @@ -4,7 +4,8 @@ # or more contributor license agreements. Licensed under the Elastic License; # you may not use this file except in compliance with the Elastic License. 
-ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateGenerateTool \ +ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateGenerateTool \ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \ + ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \ "`dirname "$0"`"/elasticsearch-cli \ "$@" diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat index b5842b57b16..bb303f740e5 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat @@ -7,8 +7,9 @@ rem you may not use this file except in compliance with the Elastic License. setlocal enabledelayedexpansion setlocal enableextensions -set ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateGenerateTool +set ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateGenerateTool set ES_ADDITIONAL_SOURCES=x-pack-env;x-pack-security-env +set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli call "%~dp0elasticsearch-cli.bat" ^ %%* ^ || exit /b 1 diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil index a13be812f0b..6d94344949b 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil @@ -4,7 +4,8 @@ # or more contributor license agreements. Licensed under the Elastic License; # you may not use this file except in compliance with the Elastic License. 
-ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateTool \ +ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateTool \ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \ + ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \ "`dirname "$0"`"/elasticsearch-cli \ "$@" diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat index 2e397190f23..34f595824f8 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat @@ -7,8 +7,9 @@ rem you may not use this file except in compliance with the Elastic License. setlocal enabledelayedexpansion setlocal enableextensions -set ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateTool +set ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateTool set ES_ADDITIONAL_SOURCES=x-pack-env;x-pack-security-env +set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli call "%~dp0elasticsearch-cli.bat" ^ %%* ^ || exit /b 1 From 5525cb1615b7d0a5b4e512305d6e5d2de73f5fcd Mon Sep 17 00:00:00 2001 From: debadair Date: Fri, 20 Jul 2018 14:17:48 -0700 Subject: [PATCH 127/260] [DOCS] Clarified that you must remove X-Pack plugin when upgrading from pre-6.3. 
(#32016) --- docs/reference/upgrade/cluster_restart.asciidoc | 6 ++++-- docs/reference/upgrade/remove-xpack.asciidoc | 4 ++++ docs/reference/upgrade/rolling_upgrade.asciidoc | 4 +++- 3 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 docs/reference/upgrade/remove-xpack.asciidoc diff --git a/docs/reference/upgrade/cluster_restart.asciidoc b/docs/reference/upgrade/cluster_restart.asciidoc index b092e45ae29..06d5e96f8ef 100644 --- a/docs/reference/upgrade/cluster_restart.asciidoc +++ b/docs/reference/upgrade/cluster_restart.asciidoc @@ -47,6 +47,8 @@ include::set-paths-tip.asciidoc[] Use the `elasticsearch-plugin` script to install the upgraded version of each installed Elasticsearch plugin. All plugins must be upgraded when you upgrade a node. ++ +include::remove-xpack.asciidoc[] . *Start each upgraded node.* + @@ -91,7 +93,7 @@ already have local shard copies. + -- When all nodes have joined the cluster and recovered their primary shards, -reenable allocation by restoring `cluster.routing.allocation.enable` to its +reenable allocation by restoring `cluster.routing.allocation.enable` to its default: [source,js] @@ -123,4 +125,4 @@ GET _cat/recovery // CONSOLE -- -. *Restart machine learning jobs.* +. *Restart machine learning jobs.* diff --git a/docs/reference/upgrade/remove-xpack.asciidoc b/docs/reference/upgrade/remove-xpack.asciidoc new file mode 100644 index 00000000000..9d4c4c9f779 --- /dev/null +++ b/docs/reference/upgrade/remove-xpack.asciidoc @@ -0,0 +1,4 @@ +IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3, +remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As +of 6.3, {xpack} is included in the default distribution. The node will fail to +start if the old {xpack} plugin is present. 
diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index 76a10f752be..e2edb6b2922 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -53,6 +53,8 @@ include::set-paths-tip.asciidoc[] Use the `elasticsearch-plugin` script to install the upgraded version of each installed Elasticsearch plugin. All plugins must be upgraded when you upgrade a node. ++ +include::remove-xpack.asciidoc[] . *Start the upgraded node.* + @@ -144,7 +146,7 @@ for each node that needs to be updated. -- -. *Restart machine learning jobs.* +. *Restart machine learning jobs.* [IMPORTANT] ==================================================== From 042424b43ba0e1bf0748e77f9941ccd3ef578f34 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 20 Jul 2018 17:33:15 -0400 Subject: [PATCH 128/260] Switch full-cluster-restart to new style Requests (#32140) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes all calls in the `qa/full-cluster-restart` project to use the new versions. It also fixes a small bug in the test for explain on the `_all` field that was causing it to not properly invoke `_explain`. 
--- .../upgrades/FullClusterRestartIT.java | 497 +++++++++--------- 1 file changed, 254 insertions(+), 243 deletions(-) diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 081a1918674..0b936e44e5b 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -19,9 +19,6 @@ package org.elasticsearch.upgrades; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Request; @@ -34,7 +31,6 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -45,7 +41,6 @@ import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.Base64; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -142,8 +137,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + 
client().performRequest(createIndex); count = randomIntBetween(2000, 3000); byte[] randomByteArray = new byte[16]; @@ -164,16 +160,7 @@ public class FullClusterRestartIT extends ESRestTestCase { count = countOfIndexedRandomDocuments(); } - Map params = new HashMap<>(); - params.put("timeout", "2m"); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - params.put("wait_for_events", "languid"); - Map healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params)); - logger.info("health api response: {}", healthRsp); - assertEquals("green", healthRsp.get("status")); - assertFalse((Boolean) healthRsp.get("timed_out")); - + ensureGreenLongWait(index); assertBasicSearchWorks(count); assertAllSearchWorks(count); assertBasicAggregationWorks(); @@ -205,8 +192,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); int numDocs = randomIntBetween(2000, 3000); indexRandomDocuments(numDocs, true, false, i -> { @@ -215,33 +203,26 @@ public class FullClusterRestartIT extends ESRestTestCase { .endObject(); }); logger.info("Refreshing [{}]", index); - client().performRequest("POST", "/" + index + "/_refresh"); + client().performRequest(new Request("POST", "/" + index + "/_refresh")); } else { final int numReplicas = 1; final long startTime = System.currentTimeMillis(); logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index); - String requestBody = "{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}"; - Response response = client().performRequest("PUT", "/" + index + "/_settings", 
Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings"); + setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}"); + Response response = client().performRequest(setNumberOfReplicas); - Map params = new HashMap<>(); - params.put("timeout", "2m"); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - params.put("wait_for_events", "languid"); - Map healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params)); - assertEquals("green", healthRsp.get("status")); - assertFalse((Boolean) healthRsp.get("timed_out")); + ensureGreenLongWait(index); logger.debug("--> index [{}] is green, took [{}] ms", index, (System.currentTimeMillis() - startTime)); - Map recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery")); + Map recoverRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_recovery"))); logger.debug("--> recovery status:\n{}", recoverRsp); Set counts = new HashSet<>(); for (String node : dataNodes(index, client())) { - Map responseBody = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.singletonMap("preference", "_only_nodes:" + node))); + Request search = new Request("GET", "/" + index + "/_search"); + search.addParameter("preference", "_only_nodes:" + node); + Map responseBody = entityAsMap(client().performRequest(search)); assertNoFailures(responseBody); int hits = (int) XContentMapValues.extractValue("hits.total", responseBody); counts.add(hits); @@ -282,12 +263,13 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new 
StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); String aliasName = "%23" + index; // %23 == # - client().performRequest("PUT", "/" + index + "/_alias/" + aliasName); - Response response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName); + client().performRequest(new Request("PUT", "/" + index + "/_alias/" + aliasName)); + Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName)); assertEquals(200, response.getStatusLine().getStatusCode()); count = randomIntBetween(32, 128); @@ -301,19 +283,20 @@ public class FullClusterRestartIT extends ESRestTestCase { count = countOfIndexedRandomDocuments(); } - logger.error("clusterState=" + toMap(client().performRequest("GET", "/_cluster/state", - Collections.singletonMap("metric", "metadata")))); + Request request = new Request("GET", "/_cluster/state"); + request.addParameter("metric", "metadata"); + logger.error("clusterState=" + entityAsMap(client().performRequest(request))); // We can read from the alias just like we can read from the index. String aliasName = "%23" + index; // %23 == # - Map searchRsp = toMap(client().performRequest("GET", "/" + aliasName + "/_search")); + Map searchRsp = entityAsMap(client().performRequest(new Request("GET", "/" + aliasName + "/_search"))); int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); assertEquals(count, totalHits); if (runningAgainstOldCluster == false) { // We can remove the alias. 
- Response response = client().performRequest("DELETE", "/" + index + "/_alias/" + aliasName); + Response response = client().performRequest(new Request("DELETE", "/" + index + "/_alias/" + aliasName)); assertEquals(200, response.getStatusLine().getStatusCode()); // and check that it is gone: - response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName); + response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName)); assertEquals(404, response.getStatusLine().getStatusCode()); } } @@ -330,13 +313,14 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); - client().performRequest("PUT", "/" + index); + Request createTemplate = new Request("PUT", "/_template/template_1"); + createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createTemplate); + client().performRequest(new Request("PUT", "/" + index)); } // verifying if we can still read some properties from cluster state api: - Map clusterState = toMap(client().performRequest("GET", "/_cluster/state")); + Map clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state"))); // Check some global properties: String clusterName = (String) clusterState.get("cluster_name"); @@ -381,8 +365,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); numDocs = 
randomIntBetween(512, 1024); indexRandomDocuments(numDocs, true, true, i -> { @@ -393,23 +378,20 @@ public class FullClusterRestartIT extends ESRestTestCase { ensureGreen(index); // wait for source index to be available on both nodes before starting shrink - String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}"; - Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings"); + updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}"); + client().performRequest(updateSettingsRequest); - String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}"; - rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(), - new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex); + shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}"); + client().performRequest(shrinkIndexRequest); - rsp = client().performRequest("POST", "/_refresh"); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + client().performRequest(new Request("POST", "/_refresh")); } else { numDocs = countOfIndexedRandomDocuments(); } - Map response = toMap(client().performRequest("GET", "/" + index + "/_search")); + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); assertNoFailures(response); int totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertThat(totalShards, greaterThan(1)); @@ -418,7 +400,7 @@ public class FullClusterRestartIT extends ESRestTestCase { int 
totalHits = (int) XContentMapValues.extractValue("hits.total", response); assertEquals(numDocs, totalHits); - response = toMap(client().performRequest("GET", "/" + shrunkenIndex+ "/_search")); + response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex+ "/_search"))); assertNoFailures(response); totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertEquals(1, totalShards); @@ -448,8 +430,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); indexRandomDocuments(numDocs, true, true, i -> { @@ -460,23 +443,20 @@ public class FullClusterRestartIT extends ESRestTestCase { } else { ensureGreen(index); // wait for source index to be available on both nodes before starting shrink - String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}"; - Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings"); + updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}"); + client().performRequest(updateSettingsRequest); - String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}"; - rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(), - new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON)); - 
assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex); + shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}"); + client().performRequest(shrinkIndexRequest); numDocs = countOfIndexedRandomDocuments(); } - Response rsp = client().performRequest("POST", "/_refresh"); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + client().performRequest(new Request("POST", "/_refresh")); - Map response = toMap(client().performRequest("GET", "/" + index + "/_search")); + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); assertNoFailures(response); int totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertThat(totalShards, greaterThan(1)); @@ -486,7 +466,7 @@ public class FullClusterRestartIT extends ESRestTestCase { assertEquals(numDocs, totalHits); if (runningAgainstOldCluster == false) { - response = toMap(client().performRequest("GET", "/" + shrunkenIndex + "/_search")); + response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex + "/_search"))); assertNoFailures(response); totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertEquals(1, totalShards); @@ -499,43 +479,48 @@ public class FullClusterRestartIT extends ESRestTestCase { void assertBasicSearchWorks(int count) throws IOException { logger.info("--> testing basic search"); - Map response = toMap(client().performRequest("GET", "/" + index + "/_search")); - assertNoFailures(response); - int numDocs = (int) XContentMapValues.extractValue("hits.total", response); - logger.info("Found {} in old index", numDocs); - assertEquals(count, numDocs); + { + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); + assertNoFailures(response); + int numDocs = (int) XContentMapValues.extractValue("hits.total", response); + 
logger.info("Found {} in old index", numDocs); + assertEquals(count, numDocs); + } logger.info("--> testing basic search with sort"); - String searchRequestBody = "{ \"sort\": [{ \"int\" : \"asc\" }]}"; - response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(response); - numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertEquals(count, numDocs); + { + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"sort\": [{ \"int\" : \"asc\" }]}"); + Map response = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(response); + assertTotalHits(count, response); + } logger.info("--> testing exists filter"); - searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}"; - response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(response); - numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertEquals(count, numDocs); + { + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"string\"} }}"); + Map response = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(response); + assertTotalHits(count, response); + } - searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}"; - response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(response); - numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertEquals(count, numDocs); + logger.info("--> testing field with dots in the name"); + { + Request 
searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}"); + Map response = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(response); + assertTotalHits(count, response); + } } void assertAllSearchWorks(int count) throws IOException { logger.info("--> testing _all search"); - Map searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search")); - assertNoFailures(searchRsp); - int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(count, totalHits); - Map bestHit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0); + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); + assertNoFailures(response); + assertTotalHits(count, response); + Map bestHit = (Map) ((List) (XContentMapValues.extractValue("hits.hits", response))).get(0); // Make sure there are payloads and they are taken into account for the score // the 'string' field has a boost of 4 in the mappings so it should get a payload boost @@ -543,82 +528,77 @@ public class FullClusterRestartIT extends ESRestTestCase { assertNotNull(stringValue); String type = (String) bestHit.get("_type"); String id = (String) bestHit.get("_id"); - String requestBody = "{ \"query\": { \"match_all\" : {} }}"; - String explanation = toStr(client().performRequest("GET", "/" + index + "/" + type + "/" + id, - Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON))); + Request explanationRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain"); + explanationRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}"); + String explanation = toStr(client().performRequest(explanationRequest)); assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost")); // Make sure the query can run on 
the whole index - searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.singletonMap("explain", "true"), new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(searchRsp); - totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(count, totalHits); + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setEntity(explanationRequest.getEntity()); + searchRequest.addParameter("explain", "true"); + Map matchAllResponse = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(matchAllResponse); + assertTotalHits(count, matchAllResponse); } void assertBasicAggregationWorks() throws IOException { // histogram on a long - String requestBody = "{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}"; - Map searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(searchRsp); - List histoBuckets = (List) XContentMapValues.extractValue("aggregations.histo.buckets", searchRsp); - long totalCount = 0; + Request longHistogramRequest = new Request("GET", "/" + index + "/_search"); + longHistogramRequest.setJsonEntity("{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}"); + Map longHistogram = entityAsMap(client().performRequest(longHistogramRequest)); + assertNoFailures(longHistogram); + List histoBuckets = (List) XContentMapValues.extractValue("aggregations.histo.buckets", longHistogram); + int histoCount = 0; for (Object entry : histoBuckets) { Map bucket = (Map) entry; - totalCount += (Integer) bucket.get("doc_count"); + histoCount += (Integer) bucket.get("doc_count"); } - int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(totalHits, totalCount); + assertTotalHits(histoCount, longHistogram); // terms on 
a boolean - requestBody = "{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}"; - searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - List termsBuckets = (List) XContentMapValues.extractValue("aggregations.bool_terms.buckets", searchRsp); - totalCount = 0; + Request boolTermsRequest = new Request("GET", "/" + index + "/_search"); + boolTermsRequest.setJsonEntity("{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}"); + Map boolTerms = entityAsMap(client().performRequest(boolTermsRequest)); + List termsBuckets = (List) XContentMapValues.extractValue("aggregations.bool_terms.buckets", boolTerms); + int termsCount = 0; for (Object entry : termsBuckets) { Map bucket = (Map) entry; - totalCount += (Integer) bucket.get("doc_count"); + termsCount += (Integer) bucket.get("doc_count"); } - totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(totalHits, totalCount); + assertTotalHits(termsCount, boolTerms); } void assertRealtimeGetWorks() throws IOException { - String requestBody = "{ \"index\": { \"refresh_interval\" : -1 }}"; - Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings"); + disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}"); + client().performRequest(disableAutoRefresh); - requestBody = "{ \"query\": { \"match_all\" : {} }}"; - Map searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - Map hit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0); + Request 
searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}"); + Map searchResponse = entityAsMap(client().performRequest(searchRequest)); + Map hit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0); String docId = (String) hit.get("_id"); - requestBody = "{ \"doc\" : { \"foo\": \"bar\"}}"; - response = client().performRequest("POST", "/" + index + "/doc/" + docId + "/_update", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update"); + updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}"); + client().performRequest(updateRequest); - Map getRsp = toMap(client().performRequest("GET", "/" + index + "/doc/" + docId)); + Map getRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/doc/" + docId))); Map source = (Map) getRsp.get("_source"); assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo")); - requestBody = "{ \"index\": { \"refresh_interval\" : \"1s\" }}"; - response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request enableAutoRefresh = new Request("PUT", "/" + index + "/_settings"); + enableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : \"1s\" }}"); + client().performRequest(enableAutoRefresh); } void assertStoredBinaryFields(int count) throws Exception { - String requestBody = "{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}"; - Map rsp = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON))); + Request 
request = new Request("GET", "/" + index + "/_search"); + request.setJsonEntity("{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}"); + Map rsp = entityAsMap(client().performRequest(request)); - int totalCount = (Integer) XContentMapValues.extractValue("hits.total", rsp); - assertEquals(count, totalCount); + assertTotalHits(count, rsp); List hits = (List) XContentMapValues.extractValue("hits.hits", rsp); assertEquals(100, hits.size()); for (Object hit : hits) { @@ -631,14 +611,6 @@ public class FullClusterRestartIT extends ESRestTestCase { } } - static Map toMap(Response response) throws IOException { - return toMap(EntityUtils.toString(response.getEntity())); - } - - static Map toMap(String response) throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); - } - static String toStr(Response response) throws IOException { return EntityUtils.toString(response.getEntity()); } @@ -648,6 +620,11 @@ public class FullClusterRestartIT extends ESRestTestCase { assertEquals(0, failed); } + static void assertTotalHits(int expectedTotalHits, Map response) { + int actualTotalHits = (Integer) XContentMapValues.extractValue("hits.total", response); + assertEquals(expectedTotalHits, actualTotalHits); + } + /** * Tests that a single document survives. Super basic smoke test. 
*/ @@ -656,11 +633,12 @@ public class FullClusterRestartIT extends ESRestTestCase { String doc = "{\"test\": \"test\"}"; if (runningAgainstOldCluster) { - client().performRequest("PUT", docLocation, singletonMap("refresh", "true"), - new StringEntity(doc, ContentType.APPLICATION_JSON)); + Request createDoc = new Request("PUT", docLocation); + createDoc.setJsonEntity(doc); + client().performRequest(createDoc); } - assertThat(toStr(client().performRequest("GET", docLocation)), containsString(doc)); + assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc)); } /** @@ -733,16 +711,18 @@ public class FullClusterRestartIT extends ESRestTestCase { } // Count the documents in the index to make sure we have as many as we put there - String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0"))); + Request countRequest = new Request("GET", "/" + index + "/_search"); + countRequest.addParameter("size", "0"); + String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); if (false == runningAgainstOldCluster) { boolean restoredFromTranslog = false; boolean foundPrimary = false; - Map params = new HashMap<>(); - params.put("h", "index,shard,type,stage,translog_ops_recovered"); - params.put("s", "index,shard,type"); - String recoveryResponse = toStr(client().performRequest("GET", "/_cat/recovery/" + index, params)); + Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index); + recoveryRequest.addParameter("h", "index,shard,type,stage,translog_ops_recovered"); + recoveryRequest.addParameter("s", "index,shard,type"); + String recoveryResponse = toStr(client().performRequest(recoveryRequest)); for (String line : recoveryResponse.split("\n")) { // Find the primaries foundPrimary = true; @@ -768,11 +748,10 @@ public class FullClusterRestartIT extends ESRestTestCase { if (shouldHaveTranslog && false == 
currentLuceneVersion.equals(bwcLuceneVersion)) { int numCurrentVersion = 0; int numBwcVersion = 0; - params.clear(); - params.put("h", "prirep,shard,index,version"); - params.put("s", "prirep,shard,index"); - String segmentsResponse = toStr( - client().performRequest("GET", "/_cat/segments/" + index, params)); + Request segmentsRequest = new Request("GET", "/_cat/segments/" + index); + segmentsRequest.addParameter("h", "prirep,shard,index,version"); + segmentsRequest.addParameter("s", "prirep,shard,index"); + String segmentsResponse = toStr(client().performRequest(segmentsRequest)); for (String line : segmentsResponse.split("\n")) { if (false == line.startsWith("p")) { continue; @@ -817,14 +796,16 @@ public class FullClusterRestartIT extends ESRestTestCase { refresh(); // Count the documents in the index to make sure we have as many as we put there - String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0"))); + Request countRequest = new Request("GET", "/" + index + "/_search"); + countRequest.addParameter("size", "0"); + String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); // Stick a routing attribute into to cluster settings so we can see it after the restore - HttpEntity routingSetting = new StringEntity( - "{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}", - ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/_cluster/settings", emptyMap(), routingSetting); + Request addRoutingSettings = new Request("PUT", "/_cluster/settings"); + addRoutingSettings.setJsonEntity( + "{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}"); + client().performRequest(addRoutingSettings); // Stick a template into the cluster so we can see it after the restore XContentBuilder templateBuilder = JsonXContent.contentBuilder().startObject(); 
@@ -857,8 +838,9 @@ public class FullClusterRestartIT extends ESRestTestCase { templateBuilder.endObject(); } templateBuilder.endObject().endObject(); - client().performRequest("PUT", "/_template/test_template", emptyMap(), - new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON)); + Request createTemplateRequest = new Request("PUT", "/_template/test_template"); + createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder)); + client().performRequest(createTemplateRequest); if (runningAgainstOldCluster) { // Create the repo @@ -871,13 +853,15 @@ public class FullClusterRestartIT extends ESRestTestCase { repoConfig.endObject(); } repoConfig.endObject(); - client().performRequest("PUT", "/_snapshot/repo", emptyMap(), - new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON)); + Request createRepoRequest = new Request("PUT", "/_snapshot/repo"); + createRepoRequest.setJsonEntity(Strings.toString(repoConfig)); + client().performRequest(createRepoRequest); } - client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"), - singletonMap("wait_for_completion", "true"), - new StringEntity("{\"indices\": \"" + index + "\"}", ContentType.APPLICATION_JSON)); + Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? 
"old_snap" : "new_snap")); + createSnapshot.addParameter("wait_for_completion", "true"); + createSnapshot.setJsonEntity("{\"indices\": \"" + index + "\"}"); + client().performRequest(createSnapshot); checkSnapshot("old_snap", count, oldClusterVersion); if (false == runningAgainstOldCluster) { @@ -896,10 +880,13 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); } else { - Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards")); + Request statsRequest = new Request("GET", index + "/_stats"); + statsRequest.addParameter("level", "shards"); + Response response = client().performRequest(statsRequest); List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." 
+ index + ".shards.0"); String globalHistoryUUID = null; for (Object shard : shardStats) { @@ -920,18 +907,20 @@ public class FullClusterRestartIT extends ESRestTestCase { private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException { // Check the snapshot metadata, especially the version - String response = toStr(client().performRequest("GET", "/_snapshot/repo/" + snapshotName, listSnapshotVerboseParams())); - Map map = toMap(response); - assertEquals(response, singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", map)); - assertEquals(response, singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", map)); - assertEquals(response, singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", map)); + Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName); + if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) { + listSnapshotRequest.addParameter("verbose", "true"); + } + Map listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest)); + assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse)); + assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse)); + assertEquals(singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", listSnapshotResponse)); // Remove the routing setting and template so we can test restoring them. 
- HttpEntity clearRoutingSetting = new StringEntity( - "{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}", - ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/_cluster/settings", emptyMap(), clearRoutingSetting); - client().performRequest("DELETE", "/_template/test_template", emptyMap(), clearRoutingSetting); + Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings"); + clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}"); + client().performRequest(clearRoutingFromSettings); + client().performRequest(new Request("DELETE", "/_template/test_template")); // Restore XContentBuilder restoreCommand = JsonXContent.contentBuilder().startObject(); @@ -940,11 +929,15 @@ public class FullClusterRestartIT extends ESRestTestCase { restoreCommand.field("rename_pattern", index); restoreCommand.field("rename_replacement", "restored_" + index); restoreCommand.endObject(); - client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"), - new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON)); + Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore"); + restoreRequest.addParameter("wait_for_completion", "true"); + restoreRequest.setJsonEntity(Strings.toString(restoreCommand)); + client().performRequest(restoreRequest); // Make sure search finds all documents - String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); + Request countRequest = new Request("GET", "/restored_" + index + "/_search"); + countRequest.addParameter("size", "0"); + String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); // Add some extra documents to the index to be sure we can still write to it after restoring it @@ 
-954,61 +947,56 @@ public class FullClusterRestartIT extends ESRestTestCase { bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n"); bulk.append("{\"test\":\"test\"}\n"); } - client().performRequest("POST", "/restored_" + index + "/doc/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk"); + writeToRestoredRequest.addParameter("refresh", "true"); + writeToRestoredRequest.setJsonEntity(bulk.toString()); + client().performRequest(writeToRestoredRequest); // And count to make sure the add worked // Make sure search finds all documents - countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); - assertThat(countResponse, containsString("\"total\":" + (count + extras))); + Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search"); + countAfterWriteRequest.addParameter("size", "0"); + String countAfterWriteResponse = toStr(client().performRequest(countAfterWriteRequest)); + assertThat(countAfterWriteResponse, containsString("\"total\":" + (count + extras))); // Clean up the index for the next iteration - client().performRequest("DELETE", "/restored_*"); + client().performRequest(new Request("DELETE", "/restored_*")); // Check settings added by the restore process - map = toMap(client().performRequest("GET", "/_cluster/settings", singletonMap("flat_settings", "true"))); - Map expected = new HashMap<>(); - expected.put("transient", emptyMap()); - expected.put("persistent", singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString())); - if (expected.equals(map) == false) { + Request clusterSettingsRequest = new Request("GET", "/_cluster/settings"); + clusterSettingsRequest.addParameter("flat_settings", "true"); + Map clusterSettingsResponse = 
entityAsMap(client().performRequest(clusterSettingsRequest)); + Map expectedClusterSettings = new HashMap<>(); + expectedClusterSettings.put("transient", emptyMap()); + expectedClusterSettings.put("persistent", + singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString())); + if (expectedClusterSettings.equals(clusterSettingsResponse) == false) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); - builder.compareMaps(map, expected); + builder.compareMaps(clusterSettingsResponse, expectedClusterSettings); fail("settings don't match:\n" + builder.toString()); } // Check that the template was restored successfully - map = toMap(client().performRequest("GET", "/_template/test_template")); - expected = new HashMap<>(); + Map getTemplateResponse = entityAsMap(client().performRequest(new Request("GET", "/_template/test_template"))); + Map expectedTemplate = new HashMap<>(); if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_0_0_beta1)) { - expected.put("template", "evil_*"); + expectedTemplate.put("template", "evil_*"); } else { - expected.put("index_patterns", singletonList("evil_*")); + expectedTemplate.put("index_patterns", singletonList("evil_*")); } - expected.put("settings", singletonMap("index", singletonMap("number_of_shards", "1"))); - expected.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true)))); - expected.put("order", 0); + expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1"))); + expectedTemplate.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true)))); + expectedTemplate.put("order", 0); Map aliases = new HashMap<>(); aliases.put("alias1", emptyMap()); aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString())))); - expected.put("aliases", aliases); - expected = singletonMap("test_template", expected); - if (false == 
expected.equals(map)) { + expectedTemplate.put("aliases", aliases); + expectedTemplate = singletonMap("test_template", expectedTemplate); + if (false == expectedTemplate.equals(getTemplateResponse)) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); - builder.compareMaps(map, expected); + builder.compareMaps(getTemplateResponse, expectedTemplate); fail("template doesn't match:\n" + builder.toString()); } - - } - - /** - * Parameters required to get the version of Elasticsearch that took the snapshot. - * On versions after 5.5 we need a {@code verbose} parameter. - */ - private Map listSnapshotVerboseParams() { - if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0)) { - return emptyMap(); - } - return singletonMap("verbose", "true"); } // TODO tests for upgrades after shrink. We've had trouble with shrink in the past. @@ -1018,14 +1006,15 @@ public class FullClusterRestartIT extends ESRestTestCase { logger.info("Indexing {} random documents", count); for (int i = 0; i < count; i++) { logger.debug("Indexing document [{}]", i); - client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(), - new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON)); + Request createDocument = new Request("POST", "/" + index + "/doc/" + i); + createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i))); + client().performRequest(createDocument); if (rarely()) { refresh(); } if (flushAllowed && rarely()) { logger.debug("Flushing [{}]", index); - client().performRequest("POST", "/" + index + "/_flush"); + client().performRequest(new Request("POST", "/" + index + "/_flush")); } } if (saveInfo) { @@ -1042,13 +1031,16 @@ public class FullClusterRestartIT extends ESRestTestCase { infoDoc.field("value", value); infoDoc.endObject(); // Only create the first version so we know how many documents are created when the index is first created - Map params = singletonMap("op_type", "create"); - 
client().performRequest("PUT", "/info/doc/" + index + "_" + type, params, - new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/info/doc/" + index + "_" + type); + request.addParameter("op_type", "create"); + request.setJsonEntity(Strings.toString(infoDoc)); + client().performRequest(request); } private String loadInfoDocument(String type) throws IOException { - String doc = toStr(client().performRequest("GET", "/info/doc/" + index + "_" + type, singletonMap("filter_path", "_source"))); + Request request = new Request("GET", "/info/doc/" + index + "_" + type); + request.addParameter("filter_path", "_source"); + String doc = toStr(client().performRequest(request)); Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc); assertTrue(doc, m.find()); return m.group(1); @@ -1060,11 +1052,13 @@ public class FullClusterRestartIT extends ESRestTestCase { private void refresh() throws IOException { logger.debug("Refreshing [{}]", index); - client().performRequest("POST", "/" + index + "/_refresh"); + client().performRequest(new Request("POST", "/" + index + "/_refresh")); } private List dataNodes(String index, RestClient client) throws IOException { - Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards")); + Request request = new Request("GET", index + "/_stats"); + request.addParameter("level", "shards"); + Response response = client.performRequest(request); List nodes = new ArrayList<>(); List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0"); for (Object shard : shardStats) { @@ -1073,4 +1067,21 @@ public class FullClusterRestartIT extends ESRestTestCase { } return nodes; } + + /** + * Wait for an index to have green health, waiting longer than + * {@link ESRestTestCase#ensureGreen}. 
+ */ + protected void ensureGreenLongWait(String index) throws IOException { + Request request = new Request("GET", "/_cluster/health/" + index); + request.addParameter("timeout", "2m"); + request.addParameter("wait_for_status", "green"); + request.addParameter("wait_for_no_relocating_shards", "true"); + request.addParameter("wait_for_events", "languid"); + request.addParameter("level", "shards"); + Map healthRsp = entityAsMap(client().performRequest(request)); + logger.info("health api response: {}", healthRsp); + assertEquals("green", healthRsp.get("status")); + assertFalse((Boolean) healthRsp.get("timed_out")); + } } From 1390a849e10568b40a5333059aebfe94f0239411 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Sat, 21 Jul 2018 10:07:08 +0200 Subject: [PATCH 129/260] [TEST] improve REST high-level client naming conventions check (#32244) Check the deprecated methods are effectively deprecated. Also compare the class rather than their names when checking argument types. --- .../client/RestHighLevelClientTests.java | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 5acc6f5552f..64a344790ca 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -724,8 +724,8 @@ public class RestHighLevelClientTests extends ESTestCase { assertEquals(0, method.getExceptionTypes().length); assertEquals(3, method.getParameterTypes().length); assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); - assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); - assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName())); + 
assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class)); + assertThat(method.getParameterTypes()[2], equalTo(ActionListener.class)); } else { //A few methods return a boolean rather than a response object if (apiName.equals("ping") || apiName.contains("exist")) { @@ -738,18 +738,23 @@ public class RestHighLevelClientTests extends ESTestCase { //a few methods don't accept a request object as argument if (apiName.equals("ping") || apiName.equals("info")) { assertEquals(1, method.getParameterTypes().length); - assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class)); } else { assertEquals(apiName, 2, method.getParameterTypes().length); assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); - assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class)); } boolean remove = apiSpec.remove(apiName); - if (remove == false && deprecatedMethods.contains(apiName) == false) { - //TODO xpack api are currently ignored, we need to load xpack yaml spec too - if (apiName.startsWith("xpack.") == false) { - apiNotFound.add(apiName); + if (remove == false) { + if (deprecatedMethods.contains(apiName)) { + assertTrue("method [" + method.getName() + "], api [" + apiName + "] should be deprecated", + method.isAnnotationPresent(Deprecated.class)); + } else { + //TODO xpack api are currently ignored, we need to load xpack yaml spec too + if (apiName.startsWith("xpack.") == false) { + apiNotFound.add(apiName); + } } } } From 0a511cc76f770f910e512cbbe76ab27f966e1a26 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Sat, 21 Jul 2018 12:50:17 +0000 Subject: [PATCH 130/260] Improve message when JAVA_HOME not set (#32022) closes #31399 --- .../main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 6 +++++- 1 file changed, 5 
insertions(+), 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 9f54ae8b682..5da5912dabe 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -222,7 +222,11 @@ class BuildPlugin implements Plugin { // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with return Jvm.current().javaHome } else { - throw new GradleException("JAVA_HOME must be set to build Elasticsearch") + throw new GradleException( + "JAVA_HOME must be set to build Elasticsearch. " + + "Note that if the variable was just set you might have to run `./gradlew --stop` for " + + "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details." + ) } } return javaHome From 8f1d15ce61bd9b2c7f4025eaff8d1848cc282a9c Mon Sep 17 00:00:00 2001 From: Nicolas Ruflin Date: Mon, 23 Jul 2018 07:38:19 +0200 Subject: [PATCH 131/260] Add new fields to monitoring template for Beats state (#32085) New data is reported from Beats to the monitoring endpoint. This PR adds the template change necessary for it. See https://github.com/elastic/beats/issues/7521 for more details. Queue data is skipped for now as implementation is not finished yet. 
--- .../src/main/resources/monitoring-beats.json | 63 +++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/src/main/resources/monitoring-beats.json index ed027387a49..07756ba2602 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-beats.json @@ -37,6 +37,49 @@ }, "state": { "properties": { + "beat": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "host": { + "properties": { + "architecture": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "os": { + "properties": { + "build": { + "type": "keyword" + }, + "family": { + "type": "keyword" + }, + "platform": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } + } + } + }, + "input": { + "properties": { + "count": { + "type": "long" + }, + "names": { + "type": "keyword" + } + } + }, "module": { "properties": { "count": { @@ -46,6 +89,26 @@ "type": "keyword" } } + }, + "output": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "service": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } } } }, From 4b3284f7cbe679b94f10178468566fb0835d8a73 Mon Sep 17 00:00:00 2001 From: itsnotv Date: Mon, 23 Jul 2018 01:09:52 -0700 Subject: [PATCH 132/260] CCE when re-throwing "shard not available" exception in TransportShardMultiGetAction (#32185) ClassCastException can be thrown by callers of TransportActions.isShardNotAvailableException(e) as e is not always an instance of ElasticSearchException fixes #32173 --- .../action/get/TransportShardMultiGetAction.java | 5 ++--- .../TransportShardMultiTermsVectorAction.java | 11 +++++------ 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java 
b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index d15b7b92d62..e0a6cd82786 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.get; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -90,9 +89,9 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId, item.type(), item.id()), e); diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index b83ac3881fd..f1641fdd25c 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.termvectors; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -84,13 +83,13 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc try { TermVectorsResponse termVectorsResponse = TermVectorsService.getTermVectors(indexShard, termVectorsRequest); response.add(request.locations.get(i), termVectorsResponse); - } catch 
(Exception t) { - if (TransportActions.isShardNotAvailableException(t)) { - throw (ElasticsearchException) t; + } catch (RuntimeException e) { + if (TransportActions.isShardNotAvailableException(e)) { + throw e; } else { - logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), t); + logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), e); response.add(request.locations.get(i), - new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t)); + new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e)); } } } From ff87b7aba42926dea95f65933a7625636d718c87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 23 Jul 2018 11:31:04 +0200 Subject: [PATCH 133/260] Remove unnecessary warning supressions (#32250) --- .../client/PreBuiltTransportClient.java | 1 + .../nio/ChannelFactoryTests.java | 1 - .../elasticsearch/nio/EventHandlerTests.java | 1 - .../nio/SocketChannelContextTests.java | 1 - .../matrix/stats/MultiPassStats.java | 1 - .../ingest/common/JsonProcessorTests.java | 1 - .../painless/InitializerTests.java | 6 ++--- .../reindex/RestUpdateByQueryAction.java | 1 - .../netty4/SimpleNetty4TransportTests.java | 1 - .../http/nio/HttpReadWriteHandlerTests.java | 2 +- .../nio/SimpleNioTransportTests.java | 1 - .../settings/ClusterGetSettingsResponse.java | 1 - .../validate/query/QueryExplanation.java | 1 - .../elasticsearch/action/get/GetResponse.java | 1 - .../ingest/SimulateProcessorResult.java | 4 +-- .../ingest/WriteableIngestDocument.java | 1 - .../action/support/ActiveShardCount.java | 3 ++- .../ReplicationRequestBuilder.java | 1 - .../cluster/RestoreInProgress.java | 5 ++-- .../cluster/metadata/IndexGraveyard.java | 5 ++-- 
.../cluster/metadata/IndexMetaData.java | 2 +- .../metadata/IndexTemplateMetaData.java | 1 - .../cluster/metadata/MetaData.java | 4 +-- .../cluster/routing/RecoverySource.java | 2 +- .../AbstractAllocationDecision.java | 2 +- .../AllocateUnassignedDecision.java | 2 +- .../routing/allocation/MoveDecision.java | 2 +- .../component/AbstractLifecycleComponent.java | 2 -- .../inject/ConstructorInjectorStore.java | 1 - .../inject/TypeConverterBindingProcessor.java | 2 -- .../assistedinject/AssistedConstructor.java | 1 - .../inject/internal/ProviderMethod.java | 2 +- .../inject/multibindings/Multibinder.java | 1 - .../spi/DefaultBindingTargetVisitor.java | 3 +-- .../common/io/stream/StreamInput.java | 1 - .../elasticsearch/index/get/GetResult.java | 1 - .../index/mapper/DocumentMapper.java | 1 - .../NodePersistentTasksExecutor.java | 1 - .../PersistentTasksClusterService.java | 1 - .../PersistentTasksExecutorRegistry.java | 1 - .../elasticsearch/repositories/IndexId.java | 2 +- .../repositories/RepositoryData.java | 2 +- .../bucket/composite/InternalComposite.java | 1 - .../bucket/range/RangeAggregator.java | 2 -- .../MovFnPipelineAggregationBuilder.java | 1 - .../support/ValuesSourceConfig.java | 1 - .../search/sort/GeoDistanceSortBuilder.java | 1 - .../org/elasticsearch/snapshots/Snapshot.java | 2 +- .../elasticsearch/snapshots/SnapshotId.java | 3 +-- .../elasticsearch/threadpool/ThreadPool.java | 6 +++-- .../transport/TransportResponseHandler.java | 1 - .../indices/stats/IndicesStatsTests.java | 1 - .../cluster/metadata/MetaDataTests.java | 1 - .../blobstore/BlobStoreRepositoryTests.java | 4 +-- .../aggregations/bucket/DoubleTermsIT.java | 3 +-- .../aggregations/bucket/LongTermsIT.java | 1 - .../aggregations/bucket/MinDocCountIT.java | 2 +- .../search/aggregations/bucket/RangeIT.java | 1 - .../composite/CompositeAggregatorTests.java | 1 - .../bucket/terms/StringTermsIT.java | 4 --- .../CumulativeSumAggregatorTests.java | 2 -- .../pipeline/movfn/MovFnUnitTests.java | 1 
- .../search/fields/SearchFieldsIT.java | 3 --- .../search/functionscore/FunctionScoreIT.java | 4 +-- .../functionscore/RandomScoreFunctionIT.java | 2 -- .../search/sort/FieldSortIT.java | 3 --- .../search/sort/SimpleSortIT.java | 4 +-- .../ESBlobStoreRepositoryIntegTestCase.java | 2 +- .../org/elasticsearch/test/ESTestCase.java | 3 +-- .../hamcrest/ElasticsearchAssertions.java | 3 ++- .../yaml/section/GreaterThanAssertion.java | 1 - .../transport/MockTcpTransportTests.java | 2 +- .../nio/SimpleMockNioTransportTests.java | 1 - .../core/security/client/SecurityClient.java | 3 --- .../upgrade/actions/IndexUpgradeAction.java | 1 - .../integration/MlRestTestStateCleaner.java | 1 - .../ml/job/results/AnomalyRecordTests.java | 3 +-- .../rollup/RollupRestTestStateCleaner.java | 1 - .../ml/job/persistence/JobProviderTests.java | 26 +++++++++---------- .../persistence/JobResultsPersisterTests.java | 2 +- .../normalizer/ScoresUpdaterTests.java | 1 - .../monitoring/integration/MonitoringIT.java | 8 ++---- .../security/authc/AuthenticationService.java | 1 - .../action/RestAuthenticateActionTests.java | 1 - .../nio/SimpleSecurityNioTransportTests.java | 8 +++--- .../xpack/test/rest/XPackRestIT.java | 2 +- .../ActiveDirectorySessionFactoryTests.java | 2 +- 87 files changed, 64 insertions(+), 139 deletions(-) diff --git a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java index 7f9bcc6ea08..7bde7fbc06f 100644 --- a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java +++ b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport.client; import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; + import org.elasticsearch.client.transport.TransportClient; import 
org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkModule; diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java index 8ff0cfcd0c8..af4eabefd94 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java @@ -137,7 +137,6 @@ public class ChannelFactoryTests extends ESTestCase { super(rawChannelFactory); } - @SuppressWarnings("unchecked") @Override public NioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { NioSocketChannel nioSocketChannel = new NioSocketChannel(channel); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java index 0cc3aa04800..6e1e34ec1f5 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java @@ -120,7 +120,6 @@ public class EventHandlerTests extends ESTestCase { verify(channelFactory, times(2)).acceptNioChannel(same(serverContext), same(selectorSupplier)); } - @SuppressWarnings("unchecked") public void testHandleAcceptCallsServerAcceptCallback() throws IOException { NioSocketChannel childChannel = new NioSocketChannel(mock(SocketChannel.class)); SocketChannelContext childContext = mock(SocketChannelContext.class); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java index bc9a7c33f0f..9dbf483107b 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java @@ -275,7 +275,6 @@ public class SocketChannelContextTests extends ESTestCase { } } - 
@SuppressWarnings("unchecked") public void testCloseClosesChannelBuffer() throws IOException { try (SocketChannel realChannel = SocketChannel.open()) { when(channel.getRawChannel()).thenReturn(realChannel); diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java index 70e2172ce92..b5a348f45eb 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java @@ -43,7 +43,6 @@ class MultiPassStats { this.fieldBKey = fieldBName; } - @SuppressWarnings("unchecked") void computeStats(final List fieldA, final List fieldB) { // set count count = fieldA.size(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 2867ed1d240..099e8e1866b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -146,7 +146,6 @@ public class JsonProcessorTests extends ESTestCase { assertThat(exception.getMessage(), equalTo("field [field] not present as part of path [field]")); } - @SuppressWarnings("unchecked") public void testAddToRoot() throws Exception { String processorTag = randomAlphaOfLength(3); String randomTargetField = randomAlphaOfLength(2); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java index 5d881632dee..d0d0b2165ca 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java @@ -26,7 +26,7 @@ import java.util.Map; public class InitializerTests extends ScriptTestCase { - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testArrayInitializers() { int[] ints = (int[])exec("new int[] {}"); @@ -59,7 +59,7 @@ public class InitializerTests extends ScriptTestCase { assertEquals("aaaaaa", objects[3]); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testListInitializers() { List list = (List)exec("[]"); @@ -91,7 +91,7 @@ public class InitializerTests extends ScriptTestCase { assertEquals("aaaaaa", list.get(3)); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testMapInitializers() { Map map = (Map)exec("[:]"); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8f09afbb17c..bf0adc6e142 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -57,7 +57,6 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler PARSER = new ConstructingObjectParser<>( "cluster_get_settings_response", diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java index e330a0b8565..d0a62fe771d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java 
@@ -45,7 +45,6 @@ public class QueryExplanation implements Streamable, ToXContentFragment { public static final int RANDOM_SHARD = -1; - @SuppressWarnings("unchecked") static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "query_explanation", true, diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index 9ee59cf70d0..455aab7f6e3 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -129,7 +129,6 @@ public class GetResponse extends ActionResponse implements Iterable getSourceAsMap() throws ElasticsearchParseException { return getResult.sourceAsMap(); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 101ce7ec260..3f41aaddfb7 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -32,8 +32,8 @@ import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class SimulateProcessorResult implements Writeable, ToXContentObject { @@ -42,7 +42,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject { private final WriteableIngestDocument ingestDocument; private final Exception failure; - @SuppressWarnings("unchecked") private static final ConstructingObjectParser IGNORED_ERROR_PARSER = new ConstructingObjectParser<>( "ignored_error_parser", @@ -57,7 +56,6 @@ public class 
SimulateProcessorResult implements Writeable, ToXContentObject { ); } - @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "simulate_processor_result", diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 2430868bb59..6331097024c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -94,7 +94,6 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { ); } - @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "writeable_ingest_document", diff --git a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java index cdd895ff8cd..8598ab3e4be 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java +++ b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support; import com.carrotsearch.hppc.cursors.IntObjectCursor; + import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -205,7 +206,7 @@ public final class ActiveShardCount implements Writeable { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") ActiveShardCount that = (ActiveShardCount) o; + ActiveShardCount that = (ActiveShardCount) o; return value == that.value; } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java 
b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 9dc7a899d03..7b137fb418c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -72,7 +72,6 @@ public abstract class ReplicationRequestBuilder implements if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") Entry entry = (Entry) o; + Entry entry = (Entry) o; return snapshot.equals(entry.snapshot) && state == entry.state && indices.equals(entry.indices) && @@ -291,7 +292,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements return false; } - @SuppressWarnings("unchecked") ShardRestoreStatus status = (ShardRestoreStatus) o; + ShardRestoreStatus status = (ShardRestoreStatus) o; return state == status.state && Objects.equals(nodeId, status.nodeId) && Objects.equals(reason, status.reason); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 74789aada3a..3bb9d42a578 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -161,7 +161,6 @@ public final class IndexGraveyard implements MetaData.Custom { } @Override - @SuppressWarnings("unchecked") public Diff diff(final MetaData.Custom previous) { return new IndexGraveyardDiff((IndexGraveyard) previous, this); } @@ -321,7 +320,7 @@ public final class IndexGraveyard implements MetaData.Custom { @Override public IndexGraveyard apply(final MetaData.Custom previous) { - @SuppressWarnings("unchecked") final IndexGraveyard old = (IndexGraveyard) previous; + final IndexGraveyard old = (IndexGraveyard) previous; if (removedCount > old.tombstones.size()) { throw new 
IllegalStateException("IndexGraveyardDiff cannot remove [" + removedCount + "] entries from [" + old.tombstones.size() + "] tombstones."); @@ -416,7 +415,7 @@ public final class IndexGraveyard implements MetaData.Custom { if (other == null || getClass() != other.getClass()) { return false; } - @SuppressWarnings("unchecked") Tombstone that = (Tombstone) other; + Tombstone that = (Tombstone) other; return index.equals(that.index) && deleteDateInMillis == that.deleteDateInMillis; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 90380205012..18b89db72a3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.action.support.ActiveShardCount; @@ -685,7 +686,6 @@ public class IndexMetaData implements Diffable, ToXContentFragmen return lookupPrototypeSafe(key).readFrom(in); } - @SuppressWarnings("unchecked") @Override public Diff readDiff(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readDiffFrom(in); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index ae58d2885bb..d35a4baa1e6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -381,7 +381,6 @@ public class IndexTemplateMetaData extends 
AbstractDiffable, Diffable, To private final SortedMap aliasAndIndexLookup; - @SuppressWarnings("unchecked") MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings, ImmutableOpenMap indices, ImmutableOpenMap templates, ImmutableOpenMap customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices, @@ -1000,7 +1000,7 @@ public class MetaData implements Iterable, Diffable, To } public IndexGraveyard indexGraveyard() { - @SuppressWarnings("unchecked") IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE); + IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE); return graveyard; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index ff7aab4a256..13cb85ea399 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -217,7 +217,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { return false; } - @SuppressWarnings("unchecked") SnapshotRecoverySource that = (SnapshotRecoverySource) o; + SnapshotRecoverySource that = (SnapshotRecoverySource) o; return snapshot.equals(that.snapshot) && index.equals(that.index) && version.equals(that.version); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java index 850e8c9c142..7ce971958c9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java @@ -175,7 +175,7 @@ public abstract class AbstractAllocationDecision implements ToXContentFragment, if (other == null || 
other instanceof AbstractAllocationDecision == false) { return false; } - @SuppressWarnings("unchecked") AbstractAllocationDecision that = (AbstractAllocationDecision) other; + AbstractAllocationDecision that = (AbstractAllocationDecision) other; return Objects.equals(targetNode, that.targetNode) && Objects.equals(nodeDecisions, that.nodeDecisions); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java index fc2d81b38c4..c32d3e1518d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java @@ -316,7 +316,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision { if (other instanceof AllocateUnassignedDecision == false) { return false; } - @SuppressWarnings("unchecked") AllocateUnassignedDecision that = (AllocateUnassignedDecision) other; + AllocateUnassignedDecision that = (AllocateUnassignedDecision) other; return Objects.equals(allocationStatus, that.allocationStatus) && Objects.equals(allocationId, that.allocationId) && reuseStore == that.reuseStore diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java index de9795ff4c2..9439187d739 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -300,7 +300,7 @@ public final class MoveDecision extends AbstractAllocationDecision { if (other instanceof MoveDecision == false) { return false; } - @SuppressWarnings("unchecked") MoveDecision that = (MoveDecision) other; + MoveDecision that = (MoveDecision) other; return 
Objects.equals(allocationDecision, that.allocationDecision) && Objects.equals(canRemainDecision, that.canRemainDecision) && Objects.equals(clusterRebalanceDecision, that.clusterRebalanceDecision) diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index de14e0cd53d..3c4b35d5c34 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -54,7 +54,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple listeners.remove(listener); } - @SuppressWarnings({"unchecked"}) @Override public void start() { if (!lifecycle.canMoveToStarted()) { @@ -72,7 +71,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple protected abstract void doStart(); - @SuppressWarnings({"unchecked"}) @Override public void stop() { if (!lifecycle.canMoveToStopped()) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java b/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java index ce63da62d8d..dfc216028c1 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java +++ b/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java @@ -32,7 +32,6 @@ class ConstructorInjectorStore { private final FailableCache, ConstructorInjector> cache = new FailableCache, ConstructorInjector>() { @Override - @SuppressWarnings("unchecked") protected ConstructorInjector create(TypeLiteral type, Errors errors) throws ErrorsException { return createConstructor(type, errors); diff --git a/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java 
b/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java index e42082817c1..e2963864085 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java @@ -101,7 +101,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor { }, new TypeConverter() { @Override - @SuppressWarnings("unchecked") public Object convert(String value, TypeLiteral toType) { try { return Class.forName(value); @@ -128,7 +127,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor { TypeConverter typeConverter = new TypeConverter() { @Override - @SuppressWarnings("unchecked") public Object convert(String value, TypeLiteral toType) { try { return parser.invoke(null, value); diff --git a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java index cb434a90369..d676b19dddb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java @@ -42,7 +42,6 @@ class AssistedConstructor { private final ParameterListKey assistedParameters; private final List allParameters; - @SuppressWarnings("unchecked") AssistedConstructor(Constructor constructor, List> parameterTypes) { this.constructor = constructor; diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java index 0cfafc4a30a..349935ac7c4 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java @@ -97,7 +97,7 @@ public class ProviderMethod implements 
ProviderWithDependencies { try { // We know this cast is safe because T is the method's return type. - @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) + @SuppressWarnings({"unchecked"}) T result = (T) method.invoke(instance, parameters); return result; } catch (IllegalAccessException e) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 5bc1595be5f..5447f2ca399 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -220,7 +220,6 @@ public abstract class Multibinder { } @Override - @SuppressWarnings("unchecked") public void configure(Binder binder) { checkConfiguration(!isInitialized(), "Multibinder was already initialized"); diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java index 75a3b615a10..0e4f7a80131 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java @@ -78,8 +78,7 @@ public abstract class DefaultBindingTargetVisitor implements BindingTarget // javac says it's an error to cast ProviderBinding to Binding @Override - @SuppressWarnings("unchecked") public V visit(ProviderBinding providerBinding) { - return visitOther((Binding) providerBinding); + return visitOther(providerBinding); } } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index b11aa9d4a96..d7879b0d928 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ 
b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -518,7 +518,6 @@ public abstract class StreamInput extends InputStream { return (Map) readGenericValue(); } - @SuppressWarnings({"unchecked"}) @Nullable public Object readGenericValue() throws IOException { byte type = readByte(); diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 021e97767d8..a3f83609037 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -178,7 +178,6 @@ public class GetResult implements Streamable, Iterable, ToXConten /** * The source of the document (As a map). */ - @SuppressWarnings({"unchecked"}) public Map sourceAsMap() throws ElasticsearchParseException { if (source == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 87942260742..a0640ac68a9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -197,7 +197,6 @@ public class DocumentMapper implements ToXContentFragment { return mapping.root; } - @SuppressWarnings({"unchecked"}) public T metadataMapper(Class type) { return mapping.metadataMapper(type); } diff --git a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java index 87ea08dc74d..59523f33901 100644 --- a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java +++ b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java @@ -45,7 +45,6 @@ public class NodePersistentTasksExecutor { task.markAsFailed(e); } - @SuppressWarnings("unchecked") @Override protected 
void doRun() throws Exception { try { diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 9ed0af010b5..4cb8c722f26 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -85,7 +85,6 @@ public class PersistentTasksClusterService extends AbstractComponent implements listener.onFailure(e); } - @SuppressWarnings("unchecked") @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { PersistentTasksCustomMetaData tasks = newState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java index 2ac57e074b7..a8f9c73ab32 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java @@ -33,7 +33,6 @@ public class PersistentTasksExecutorRegistry extends AbstractComponent { private final Map> taskExecutors; - @SuppressWarnings("unchecked") public PersistentTasksExecutorRegistry(Settings settings, Collection> taskExecutors) { super(settings); Map> map = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/repositories/IndexId.java b/server/src/main/java/org/elasticsearch/repositories/IndexId.java index 469caa26b64..2a3d9f15d16 100644 --- a/server/src/main/java/org/elasticsearch/repositories/IndexId.java +++ b/server/src/main/java/org/elasticsearch/repositories/IndexId.java @@ -89,7 +89,7 @@ public final class IndexId implements Writeable, ToXContentObject { if (o == null || getClass() != o.getClass()) { return false; } - 
@SuppressWarnings("unchecked") IndexId that = (IndexId) o; + IndexId that = (IndexId) o; return Objects.equals(name, that.name) && Objects.equals(id, that.id); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 7a8d8327d5e..a97cf4bb419 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -238,7 +238,7 @@ public final class RepositoryData { if (obj == null || getClass() != obj.getClass()) { return false; } - @SuppressWarnings("unchecked") RepositoryData that = (RepositoryData) obj; + RepositoryData that = (RepositoryData) obj; return snapshotIds.equals(that.snapshotIds) && snapshotStates.equals(that.snapshotStates) && indices.equals(that.indices) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 1428a31a8de..e93266db805 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -247,7 +247,6 @@ public class InternalComposite this.formats = formats; } - @SuppressWarnings("unchecked") InternalBucket(StreamInput in, List sourceNames, List formats, int[] reverseMuls) throws IOException { this.key = new CompositeKey(in); this.docCount = in.readVLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index d998beedf14..c490b344bdb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -358,7 +357,6 @@ public class RangeAggregator extends BucketsAggregator { private final InternalRange.Factory factory; private final DocValueFormat format; - @SuppressWarnings("unchecked") public Unmapped(String name, R[] ranges, boolean keyed, DocValueFormat format, SearchContext context, Aggregator parent, InternalRange.Factory factory, List pipelineAggregators, Map metaData) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java index d49da4658ae..185e1c63b98 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java @@ -62,7 +62,6 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation private static final Function> PARSER = name -> { - @SuppressWarnings("unchecked") ConstructingObjectParser parser = new ConstructingObjectParser<>( MovFnPipelineAggregationBuilder.NAME, false, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index b33ba879660..0e354e14a37 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -54,7 +54,6 @@ public class ValuesSourceConfig { if (field == null) { if (script == null) { - @SuppressWarnings("unchecked") ValuesSourceConfig config = new ValuesSourceConfig<>(ValuesSourceType.ANY); config.format(resolveFormat(null, valueType)); return config; diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 30f1dfb14fc..6adad6dabf0 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -90,7 +90,6 @@ public class GeoDistanceSortBuilder extends SortBuilder private DistanceUnit unit = DistanceUnit.DEFAULT; private SortMode sortMode = null; - @SuppressWarnings("rawtypes") private QueryBuilder nestedFilter; private String nestedPath; diff --git a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java index 314cd4053dd..2847af386b2 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java @@ -80,7 +80,7 @@ public final class Snapshot implements Writeable { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") Snapshot that = (Snapshot) o; + Snapshot that = (Snapshot) o; return repository.equals(that.repository) && snapshotId.equals(that.snapshotId); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java index b80dfd94d75..7a8848618c2 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java +++ 
b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java @@ -22,7 +22,6 @@ package org.elasticsearch.snapshots; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -98,7 +97,7 @@ public final class SnapshotId implements Comparable, Writeable, ToXC if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") final SnapshotId that = (SnapshotId) o; + final SnapshotId that = (SnapshotId) o; return name.equals(that.name) && uuid.equals(that.uuid); } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 51a4adec8d1..2d3be2435b4 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -21,7 +21,6 @@ package org.elasticsearch.threadpool; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Counter; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; @@ -38,6 +37,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.node.Node; import java.io.Closeable; @@ -197,7 +197,7 @@ public class ThreadPool extends AbstractComponent implements 
Scheduler, Closeabl threadContext = new ThreadContext(settings); final Map executors = new HashMap<>(); - for (@SuppressWarnings("unchecked") final Map.Entry entry : builders.entrySet()) { + for (final Map.Entry entry : builders.entrySet()) { final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings); final ExecutorHolder executorHolder = entry.getValue().build(executorSettings, threadContext); if (executors.containsKey(executorHolder.info.getName())) { @@ -338,6 +338,7 @@ public class ThreadPool extends AbstractComponent implements Scheduler, Closeabl * the ScheduledFuture will cannot interact with it. * @throws org.elasticsearch.common.util.concurrent.EsRejectedExecutionException if the task cannot be scheduled for execution */ + @Override public ScheduledFuture schedule(TimeValue delay, String executor, Runnable command) { if (!Names.SAME.equals(executor)) { command = new ThreadedRunnable(command, executor(executor)); @@ -358,6 +359,7 @@ public class ThreadPool extends AbstractComponent implements Scheduler, Closeabl command, executor), e)); } + @Override public Runnable preserveContext(Runnable command) { return getThreadContext().preserveContext(command); } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java index 447bbd92dd2..fbe477ad04b 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java @@ -40,7 +40,6 @@ public interface TransportResponseHandler extends W * * @return the deserialized response. 
*/ - @SuppressWarnings("deprecation") @Override default T read(StreamInput in) throws IOException { T instance = newInstance(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 264a92137be..f53eb63bc10 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -153,7 +153,6 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { assertEquals(0, common.refresh.getListeners()); } - @SuppressWarnings("unchecked") public void testUuidOnRootStatsIndices() { String uuid = createIndex("test").indexUUID(); IndicesStatsResponse rsp = client().admin().indices().prepareStats().get(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 0b9055cb758..9d82e9e1cdc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -672,7 +672,6 @@ public class MetaDataTests extends ESTestCase { public static void assertLeafs(Map properties, String... 
fields) { for (String field : fields) { assertTrue(properties.containsKey(field)); - @SuppressWarnings("unchecked") Map fieldProp = (Map)properties.get(field); assertNotNull(fieldProp); assertFalse(fieldProp.containsKey("properties")); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 1abdb97f174..0137f136d3e 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -123,7 +123,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { logger.info("--> make sure the node's repository can resolve the snapshots"); final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2); @@ -245,7 +245,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); assertThat("getBlobContainer has to be lazy initialized", repository.getBlobContainer(), nullValue()); return repository; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index d216709791e..2876fbbaa25 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -32,9 +32,9 @@ import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -84,7 +84,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index bce4006fa10..e7e64027274 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -81,7 +81,6 @@ public class LongTermsIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 4a85c2c1453..eeb6e121613 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -77,7 +78,6 @@ public class MinDocCountIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index edc29b0d2c5..99aeac167e0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -75,7 +75,6 @@ public class RangeIT extends ESIntegTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 0ed1dacb73f..b0263cb2dbd 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -1746,7 +1746,6 @@ public class CompositeAggregatorTests extends AggregatorTestCase { } } - @SuppressWarnings("unchecked") private static Map createAfterKey(Object... fields) { assert fields.length % 2 == 0; final Map map = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 3b7e686ef4d..160e51a67b2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -36,9 +36,6 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -103,7 +100,6 @@ public class StringTermsIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index fa46921a941..f5dc01f1914 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -262,7 +262,6 @@ public class CumulativeSumAggregatorTests extends AggregatorTestCase { }); } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify) throws IOException { executeTestCase(query, aggBuilder, verify, indexWriter -> { Document document = new Document(); @@ -282,7 +281,6 @@ public class CumulativeSumAggregatorTests extends AggregatorTestCase { }); } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify, CheckedConsumer setup) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java index 88bbe3671b2..db3f2d745e1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java @@ -98,7 +98,6 @@ public class MovFnUnitTests extends AggregatorTestCase { } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBuilder, Consumer verify, diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 2126e0e94eb..31fa4f838df 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -96,7 +96,6 @@ public class SearchFieldsIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); @@ -143,7 +142,6 @@ public class SearchFieldsIT extends ESIntegTestCase { return scripts; } - @SuppressWarnings("unchecked") static Object fieldsScript(Map vars, String fieldName) { Map fields = (Map) vars.get("_fields"); FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName); @@ -156,7 +154,6 @@ public class SearchFieldsIT extends ESIntegTestCase { return XContentMapValues.extractValue(path, source); } - @SuppressWarnings("unchecked") static Object docScript(Map vars, String fieldName) { Map doc = (Map) vars.get("doc"); ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 12e48a3ae4f..fc11554dfb3 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; @@ -48,8 +49,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static 
org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; - -import org.elasticsearch.script.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -70,7 +69,6 @@ public class FunctionScoreIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("1", vars -> 1.0d); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 257089c9054..8203dac1a2d 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -67,7 +67,6 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); @@ -84,7 +83,6 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { return scripts; } - @SuppressWarnings("unchecked") static Double scoringScript(Map vars, Function scoring) { Map doc = (Map) vars.get("doc"); Double index = ((Number) ((ScriptDocValues) doc.get("index")).getValues().get(0)).doubleValue(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index ff0196aacdf..40d6b26b4f9 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -86,7 +86,6 @@ import static org.hamcrest.Matchers.nullValue; public class FieldSortIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("doc['number'].value", vars -> sortDoubleScript(vars)); @@ -94,14 +93,12 @@ public class FieldSortIT extends ESIntegTestCase { return scripts; } - @SuppressWarnings("unchecked") static Double sortDoubleScript(Map vars) { Map doc = (Map) vars.get("doc"); Double index = ((Number) ((ScriptDocValues) doc.get("number")).getValues().get(0)).doubleValue(); return index; } - @SuppressWarnings("unchecked") static String sortStringScript(Map vars) { Map doc = (Map) vars.get("doc"); String value = ((String) ((ScriptDocValues) doc.get("keyword")).getValues().get(0)); diff --git a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java index aa49bed6975..6668c1be0e4 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.test.ESIntegTestCase; @@ -50,8 +51,6 @@ import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; - -import 
org.elasticsearch.script.ScriptType; import static org.elasticsearch.search.sort.SortBuilders.scriptSort; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -104,7 +103,6 @@ public class SimpleSortIT extends ESIntegTestCase { /** * Return the minimal value from a set of values. */ - @SuppressWarnings("unchecked") static > T getMinValueScript(Map vars, T initialValue, String fieldName, Function converter) { T retval = initialValue; diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 439728bac9e..cef44ed17fd 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -248,7 +248,7 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase logger.info("--> verify index folder deleted from blob container"); RepositoriesService repositoriesSvc = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName()); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName()); - @SuppressWarnings("unchecked") BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName); + BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName); final SetOnce indicesBlobContainer = new SetOnce<>(); final SetOnce repositoryData = new SetOnce<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 5d555ece438..13540eaec95 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,6 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -1070,7 +1071,6 @@ public abstract class ESTestCase extends LuceneTestCase { List targetList = new ArrayList<>(); for(Object value : list) { if (value instanceof Map) { - @SuppressWarnings("unchecked") LinkedHashMap valueMap = (LinkedHashMap) value; targetList.add(shuffleMap(valueMap, exceptFields)); } else if(value instanceof List) { @@ -1090,7 +1090,6 @@ public abstract class ESTestCase extends LuceneTestCase { for (String key : keys) { Object value = map.get(key); if (value instanceof Map && exceptFields.contains(key) == false) { - @SuppressWarnings("unchecked") LinkedHashMap valueMap = (LinkedHashMap) value; targetMap.put(key, shuffleMap(valueMap, exceptFields)); } else if(value instanceof List && exceptFields.contains(key) == false) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index cf3cc39d34d..48fc7982074 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -49,6 +49,8 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import 
org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; @@ -685,7 +687,6 @@ public class ElasticsearchAssertions { /** * Compares two lists recursively, but using arrays comparisons for byte[] through Arrays.equals(byte[], byte[]) */ - @SuppressWarnings("unchecked") private static void assertListEquals(List expected, List actual) { assertEquals(expected.size(), actual.size()); Iterator actualIterator = actual.iterator(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java index 39a1f1d3780..494d65e05de 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java @@ -54,7 +54,6 @@ public class GreaterThanAssertion extends Assertion { } @Override - @SuppressWarnings("unchecked") protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is greater than [{}] (field: [{}])", actualValue, expectedValue, getField()); assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index 84c82f4159d..6d1e5116474 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -64,7 +64,7 @@ public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { @Override protected void 
closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { final MockTcpTransport t = (MockTcpTransport) transport; - @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = + final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index cf9eb5d7a8c..108411dee5b 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -96,7 +96,6 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java index 26c35db1fc9..2668e62abbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java @@ -100,7 +100,6 @@ public class SecurityClient { * Clears the realm caches. 
It's possible to clear all user entries from all realms in the cluster or alternatively * select the realms (by their unique names) and/or users (by their usernames) that should be evicted. */ - @SuppressWarnings("unchecked") public ClearRealmCacheRequestBuilder prepareClearRealmCache() { return new ClearRealmCacheRequestBuilder(client); } @@ -109,7 +108,6 @@ public class SecurityClient { * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively * select the realms (by their unique names) and/or users (by their usernames) that should be evicted. */ - @SuppressWarnings("unchecked") public void clearRealmCache(ClearRealmCacheRequest request, ActionListener listener) { client.execute(ClearRealmCacheAction.INSTANCE, request, listener); } @@ -118,7 +116,6 @@ public class SecurityClient { * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively * select the realms (by their unique names) and/or users (by their usernames) that should be evicted. */ - @SuppressWarnings("unchecked") public ActionFuture clearRealmCache(ClearRealmCacheRequest request) { return client.execute(ClearRealmCacheAction.INSTANCE, request); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java index 84a643ae72d..89279f4ea31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java @@ -76,7 +76,6 @@ public class IndexUpgradeAction extends Action { /** * Sets the index. 
*/ - @SuppressWarnings("unchecked") public final Request index(String index) { this.index = index; return this; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java index 4be0cefe525..3e0d2d80651 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java @@ -37,7 +37,6 @@ public class MlRestTestStateCleaner { final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds"); datafeedsRequest.addParameter("filter_path", "datafeeds"); final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); - @SuppressWarnings("unchecked") final List> datafeeds = (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); if (datafeeds == null) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java index ef285b87cf1..fc2ee52dc41 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java @@ -147,7 +147,6 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase jobs = ESRestTestCase.entityAsMap(response); - @SuppressWarnings("unchecked") List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index ef87fe392dd..e33dbc69db6 
100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -43,13 +43,13 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.mockito.ArgumentCaptor; import java.io.IOException; @@ -252,7 +252,7 @@ public class JobProviderTests extends ESTestCase { BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(1.0); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -286,7 +286,7 @@ public class JobProviderTests extends ESTestCase { BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(5.1) .includeInterim(true); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); 
QueryPage buckets = holder[0]; @@ -322,7 +322,7 @@ public class JobProviderTests extends ESTestCase { bq.anomalyScoreThreshold(5.1); bq.includeInterim(true); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -368,7 +368,7 @@ public class JobProviderTests extends ESTestCase { BucketsQueryBuilder bq = new BucketsQueryBuilder(); bq.timestamp(Long.toString(now.getTime())); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] bucketHolder = new QueryPage[1]; provider.buckets(jobId, bq, q -> bucketHolder[0] = q, e -> {}, client); assertThat(bucketHolder[0].count(), equalTo(1L)); @@ -409,7 +409,7 @@ public class JobProviderTests extends ESTestCase { .epochEnd(String.valueOf(now.getTime())).includeInterim(true).sortField(sortfield) .recordScore(2.2); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.records(jobId, rqb, page -> holder[0] = page, RuntimeException::new, client); QueryPage recordPage = holder[0]; @@ -462,7 +462,7 @@ public class JobProviderTests extends ESTestCase { rqb.sortField(sortfield); rqb.recordScore(2.2); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.records(jobId, rqb, page -> holder[0] = page, RuntimeException::new, client); QueryPage recordPage = holder[0]; @@ -507,7 +507,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(qb -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.bucketRecords(jobId, bucket, from, size, true, sortfield, true, page -> holder[0] = page, 
RuntimeException::new, client); @@ -568,7 +568,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(q -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.categoryDefinitions(jobId, null, false, from, size, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); @@ -590,7 +590,7 @@ public class JobProviderTests extends ESTestCase { SearchResponse response = createSearchResponse(Collections.singletonList(source)); Client client = getMockedClient(q -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.categoryDefinitions(jobId, categoryId, false, null, null, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); @@ -632,7 +632,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(q -> qbHolder[0] = q, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).includeInterim(false).build(); provider.influencers(jobId, query, page -> holder[0] = page, RuntimeException::new, client); @@ -692,7 +692,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(q -> qbHolder[0] = q, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).start("0").end("0").sortField("sort") .sortDescending(true).influencerScoreThreshold(0.0).includeInterim(true).build(); @@ -747,7 +747,7 @@ public class 
JobProviderTests extends ESTestCase { Client client = getMockedClient(qb -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.modelSnapshots(jobId, from, size, r -> holder[0] = r, RuntimeException::new); QueryPage page = holder[0]; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index f2c18ec9d5a..c31ebd4bc2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -195,7 +195,7 @@ public class JobResultsPersisterTests extends ESTestCase { verifyNoMoreInteractions(client); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) private Client mockClient(ArgumentCaptor captor) { Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java index eedc42148b1..5f8b685f844 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java @@ -65,7 +65,6 @@ public class ScoresUpdaterTests extends ESTestCase { } @Before - @SuppressWarnings("unchecked") public void setUpMocks() throws IOException { MockitoAnnotations.initMocks(this); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index a77f6bf24e9..efc32fccb3d 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -35,11 +35,11 @@ import org.elasticsearch.threadpool.ThreadPoolStats; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder; import org.elasticsearch.xpack.core.action.XPackUsageResponse; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequestBuilder; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkResponse; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; -import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; -import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.monitoring.LocalStateMonitoring; import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsMonitoringDoc; @@ -112,7 +112,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { * This test uses the Monitoring Bulk API to index document as an external application like Kibana would do. It * then ensure that the documents were correctly indexed and have the expected information. */ - @SuppressWarnings("unchecked") public void testMonitoringBulk() throws Exception { whenExportersAreReady(() -> { final MonitoredSystem system = randomSystem(); @@ -188,7 +187,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { * have been indexed with the expected information. 
*/ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29880") - @SuppressWarnings("unchecked") public void testMonitoringService() throws Exception { final boolean createAPMIndex = randomBoolean(); final String indexName = createAPMIndex ? "apm-2017.11.06" : "books"; @@ -284,7 +282,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { * Asserts that the source_node information (provided as a Map) of a monitoring document correspond to * the current local node information */ - @SuppressWarnings("unchecked") private void assertMonitoringDocSourceNode(final Map sourceNode) { assertEquals(6, sourceNode.size()); @@ -541,7 +538,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { /** * Disable the monitoring service and the Local exporter. */ - @SuppressWarnings("unchecked") public void disableMonitoring() throws Exception { final Settings settings = Settings.builder() .putNull("xpack.monitoring.collection.enabled") diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 8bae951e883..3898e34b7a4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -541,7 +541,6 @@ public class AuthenticationService extends AbstractComponent { private final RestRequest request; - @SuppressWarnings("unchecked") AuditableRestRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext, RestRequest request) { super(auditTrail, failureHandler, threadContext); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java index c66ecbec2b3..67bfc2ecdcb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java @@ -58,7 +58,6 @@ public class RestAuthenticateActionTests extends SecurityIntegTestCase { assertThat(response.getStatusLine().getStatusCode(), is(200)); ObjectPath objectPath = ObjectPath.createFromResponse(response); assertThat(objectPath.evaluate("username").toString(), equalTo(SecuritySettingsSource.TEST_USER_NAME)); - @SuppressWarnings("unchecked") List roles = objectPath.evaluate("roles"); assertThat(roles.size(), is(1)); assertThat(roles, contains(SecuritySettingsSource.TEST_ROLE)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index feca093e581..70ab085fcf7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -35,9 +35,6 @@ import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; -import javax.net.SocketFactory; -import javax.net.ssl.HandshakeCompletedListener; -import javax.net.ssl.SSLSocket; import java.io.IOException; import java.net.InetAddress; import java.net.SocketTimeoutException; @@ -47,6 +44,10 @@ import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import 
javax.net.SocketFactory; +import javax.net.ssl.HandshakeCompletedListener; +import javax.net.ssl.SSLSocket; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; @@ -119,7 +120,6 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTest @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index f1d9eb1fb3f..f1495f4f3ac 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.Response; @@ -219,7 +220,6 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { @SuppressWarnings("unchecked") final Map node = (Map) nodes.values().iterator().next(); - @SuppressWarnings("unchecked") final Number activeWrites = (Number) extractValue("thread_pool.write.active", node); return activeWrites != null && activeWrites.longValue() == 0L; } catch (Exception e) { diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java 
b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index ec4e8824a19..7861557709e 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.ldap; import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.ResultCode; + import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -302,7 +303,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29840") public void testHandlingLdapReferralErrors() throws Exception { String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com"; From 261002283b4526a942eb8bdff0a268aaf123734b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 23 Jul 2018 08:56:43 -0400 Subject: [PATCH 134/260] AwaitsFix RecoveryIT#testHistoryUUIDIsGenerated Relates #31291 --- .../src/test/java/org/elasticsearch/upgrades/RecoveryIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index de5681ebe1a..062016909b6 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -47,6 +47,8 @@ import static org.hamcrest.Matchers.notNullValue; * In depth testing of the recovery mechanism during a rolling restart. 
*/ public class RecoveryIT extends AbstractRollingTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31291") public void testHistoryUUIDIsGenerated() throws Exception { final String index = "index_history_uuid"; if (CLUSTER_TYPE == ClusterType.OLD) { From 33f11e637dc05b5112ffc8c1ca95000a4782fa6d Mon Sep 17 00:00:00 2001 From: Andrey Ershov Date: Mon, 23 Jul 2018 16:38:55 +0200 Subject: [PATCH 135/260] Fail shard if IndexShard#storeStats runs into an IOException (#32241) Fail shard if IndexShard#storeStats runs into an IOException. Closes #29008 --- .../elasticsearch/index/shard/IndexShard.java | 1 + .../index/shard/IndexShardTests.java | 85 ++++++++++++++++++- .../BlobStoreRepositoryRestoreTests.java | 1 + .../ESIndexLevelReplicationTestCase.java | 11 +-- .../index/shard/IndexShardTestCase.java | 35 +++++--- 5 files changed, 113 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index fc08438a7d9..d4a1d0502d0 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -917,6 +917,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl try { return store.stats(); } catch (IOException e) { + failShard("Failing shard because of exception during storeStats", e); throw new ElasticsearchException("io exception while building 'store stats'", e); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 15e6151457f..1880b6b0954 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -27,6 +27,8 @@ import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; @@ -112,6 +114,7 @@ import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.ElasticsearchException; import java.io.IOException; import java.nio.charset.Charset; @@ -138,6 +141,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.LongFunction; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -1162,6 +1166,81 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(shard); } + + public void testShardStatsWithFailures() throws IOException { + allowShardFailures(); + final ShardId shardId = new ShardId("index", "_na_", 0); + final ShardRouting shardRouting = newShardRouting(shardId, "node", true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, ShardRoutingState.INITIALIZING); + final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); + + + ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .build(); + IndexMetaData metaData = IndexMetaData.builder(shardRouting.getIndexName()) + .settings(settings) + .primaryTerm(0, 1) + .build(); + + // Override two Directory methods to make them fail at our will + // We 
use AtomicReference here to inject failure in the middle of the test not immediately + // We use Supplier instead of IOException to produce meaningful stacktrace + // (remember stack trace is filled when exception is instantiated) + AtomicReference> exceptionToThrow = new AtomicReference<>(); + AtomicBoolean throwWhenMarkingStoreCorrupted = new AtomicBoolean(false); + Directory directory = new FilterDirectory(newFSDirectory(shardPath.resolveIndex())) { + //fileLength method is called during storeStats try block + //it's not called when store is marked as corrupted + @Override + public long fileLength(String name) throws IOException { + Supplier ex = exceptionToThrow.get(); + if (ex == null) { + return super.fileLength(name); + } else { + throw ex.get(); + } + } + + //listAll method is called when marking store as corrupted + @Override + public String[] listAll() throws IOException { + Supplier ex = exceptionToThrow.get(); + if (throwWhenMarkingStoreCorrupted.get() && ex != null) { + throw ex.get(); + } else { + return super.listAll(); + } + } + }; + + try (Store store = createStore(shardId, new IndexSettings(metaData, Settings.EMPTY), directory)) { + IndexShard shard = newShard(shardRouting, shardPath, metaData, store, + null, new InternalEngineFactory(), () -> { + }, EMPTY_EVENT_LISTENER); + AtomicBoolean failureCallbackTriggered = new AtomicBoolean(false); + shard.addShardFailureCallback((ig)->failureCallbackTriggered.set(true)); + + recoverShardFromStore(shard); + + final boolean corruptIndexException = randomBoolean(); + + if (corruptIndexException) { + exceptionToThrow.set(() -> new CorruptIndexException("Test CorruptIndexException", "Test resource")); + throwWhenMarkingStoreCorrupted.set(randomBoolean()); + } else { + exceptionToThrow.set(() -> new IOException("Test IOException")); + } + ElasticsearchException e = expectThrows(ElasticsearchException.class, shard::storeStats); + assertTrue(failureCallbackTriggered.get()); + + if (corruptIndexException && 
!throwWhenMarkingStoreCorrupted.get()) { + assertTrue(store.isMarkedCorrupted()); + } + } + } + public void testRefreshMetric() throws IOException { IndexShard shard = newStartedShard(); assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // refresh on: finalize and end of recovery @@ -1868,6 +1947,7 @@ public class IndexShardTests extends IndexShardTestCase { ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE), shard.shardPath(), shard.indexSettings().getIndexMetaData(), + null, wrapper, new InternalEngineFactory(), () -> {}, @@ -2020,6 +2100,7 @@ public class IndexShardTests extends IndexShardTestCase { ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE), shard.shardPath(), shard.indexSettings().getIndexMetaData(), + null, wrapper, new InternalEngineFactory(), () -> {}, @@ -2506,7 +2587,7 @@ public class IndexShardTests extends IndexShardTestCase { .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum", "fix"))) .build(); final IndexShard newShard = newShard(shardRouting, indexShard.shardPath(), indexMetaData, - null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); + null, null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata(); assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1); @@ -3005,7 +3086,7 @@ public class IndexShardTests extends IndexShardTestCase { ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); AtomicBoolean markedInactive = new AtomicBoolean(); AtomicReference primaryRef = new AtomicReference<>(); - IndexShard primary = newShard(shardRouting, shardPath, metaData, null, new InternalEngineFactory(), () -> { + 
IndexShard primary = newShard(shardRouting, shardPath, metaData, null, null, new InternalEngineFactory(), () -> { }, new IndexEventListener() { @Override public void onShardInactive(IndexShard indexShard) { diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index 0eae9a14200..fa7de2d6291 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -105,6 +105,7 @@ public class BlobStoreRepositoryRestoreTests extends IndexShardTestCase { shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, + null, new InternalEngineFactory(), () -> {}, EMPTY_EVENT_LISTENER); diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 5a5ee12065c..2b1841c39ae 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -259,13 +259,14 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase public synchronized IndexShard addReplicaWithExistingPath(final ShardPath shardPath, final String nodeId) throws IOException { final ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - nodeId, - false, ShardRoutingState.INITIALIZING, - RecoverySource.PeerRecoverySource.INSTANCE); + shardId, + nodeId, + false, ShardRoutingState.INITIALIZING, + RecoverySource.PeerRecoverySource.INSTANCE); final IndexShard newReplica = - newShard(shardRouting, shardPath, indexMetaData, null, 
getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); + newShard(shardRouting, shardPath, indexMetaData, null, null, getEngineFactory(shardRouting), + () -> {}, EMPTY_EVENT_LISTENER); replicas.add(newReplica); updateAllocationIDsOnPrimary(); return newReplica; diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 0cbc6e44502..f9289f65861 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -163,15 +163,20 @@ public abstract class IndexShardTestCase extends ESTestCase { return Settings.EMPTY; } - private Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException { - final ShardId shardId = shardPath.getShardId(); + + protected Store createStore(IndexSettings indexSettings, ShardPath shardPath) throws IOException { + return createStore(shardPath.getShardId(), indexSettings, newFSDirectory(shardPath.resolveIndex())); + } + + protected Store createStore(ShardId shardId, IndexSettings indexSettings, Directory directory) throws IOException { final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { @Override public Directory newDirectory() throws IOException { - return newFSDirectory(shardPath.resolveIndex()); + return directory; } }; return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + } /** @@ -284,29 +289,32 @@ public abstract class IndexShardTestCase extends ESTestCase { final ShardId shardId = routing.shardId(); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); - return newShard(routing, shardPath, indexMetaData, indexSearcherWrapper, engineFactory, 
globalCheckpointSyncer, + return newShard(routing, shardPath, indexMetaData, null, indexSearcherWrapper, engineFactory, globalCheckpointSyncer, EMPTY_EVENT_LISTENER, listeners); } /** * creates a new initializing shard. - * @param routing shard routing to use - * @param shardPath path to use for shard data - * @param indexMetaData indexMetaData for the shard, including any mapping - * @param indexSearcherWrapper an optional wrapper to be used during searchers - * @param globalCheckpointSyncer callback for syncing global checkpoints - * @param indexEventListener index even listener - * @param listeners an optional set of listeners to add to the shard + * @param routing shard routing to use + * @param shardPath path to use for shard data + * @param indexMetaData indexMetaData for the shard, including any mapping + * @param store an optional custom store to use. If null a default file based store will be created + * @param indexSearcherWrapper an optional wrapper to be used during searchers + * @param globalCheckpointSyncer callback for syncing global checkpoints + * @param indexEventListener index event listener + * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMetaData indexMetaData, - @Nullable IndexSearcherWrapper indexSearcherWrapper, + @Nullable Store store, @Nullable IndexSearcherWrapper indexSearcherWrapper, @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, IndexEventListener indexEventListener, IndexingOperationListener... 
listeners) throws IOException { final Settings nodeSettings = Settings.builder().put("node.name", routing.currentNodeId()).build(); final IndexSettings indexSettings = new IndexSettings(indexMetaData, nodeSettings); final IndexShard indexShard; - final Store store = createStore(indexSettings, shardPath); + if (store == null) { + store = createStore(indexSettings, shardPath); + } boolean success = false; try { IndexCache indexCache = new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null); @@ -357,6 +365,7 @@ public abstract class IndexShardTestCase extends ESTestCase { current.shardPath(), current.indexSettings().getIndexMetaData(), null, + null, current.engineFactory, current.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER, listeners); From d3c4904fa38e3372e65c15d2e0278367a3c71934 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 23 Jul 2018 09:12:30 -0700 Subject: [PATCH 136/260] Painless: Clean up add methods in PainlessLookup (#32258) This is largely mechanical change that cleans up the addConstructor, addMethod, and addFields methods in PainlessLookup. Changes include renamed variables, better error messages, and some minor code movement to make it more maintainable long term. 
--- .../lookup/PainlessLookupBuilder.java | 609 +++++++++++------- .../lookup/PainlessLookupUtility.java | 12 +- .../elasticsearch/painless/spi/java.lang.txt | 2 +- 3 files changed, 393 insertions(+), 230 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 06773d3ffdd..8945c956c27 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -24,10 +24,12 @@ import org.elasticsearch.painless.spi.WhitelistClass; import org.elasticsearch.painless.spi.WhitelistConstructor; import org.elasticsearch.painless.spi.WhitelistField; import org.elasticsearch.painless.spi.WhitelistMethod; -import org.objectweb.asm.Type; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collections; @@ -38,8 +40,13 @@ import java.util.Objects; import java.util.Stack; import java.util.regex.Pattern; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_TYPE_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.CONSTRUCTOR_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.DEF_CLASS_NAME; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType; +import static 
org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCanonicalTypeNames; public class PainlessLookupBuilder { @@ -123,17 +130,17 @@ public class PainlessLookupBuilder { private final List whitelists; private final Map> canonicalClassNamesToClasses; - private final Map, PainlessClassBuilder> classesToPainlessClasses; + private final Map, PainlessClassBuilder> classesToPainlessClassBuilders; public PainlessLookupBuilder(List whitelists) { this.whitelists = whitelists; canonicalClassNamesToClasses = new HashMap<>(); - classesToPainlessClasses = new HashMap<>(); + classesToPainlessClassBuilders = new HashMap<>(); - canonicalClassNamesToClasses.put(DEF_TYPE_NAME, def.class); - classesToPainlessClasses.put(def.class, - new PainlessClassBuilder(DEF_TYPE_NAME, Object.class, Type.getType(Object.class))); + canonicalClassNamesToClasses.put(DEF_CLASS_NAME, def.class); + classesToPainlessClassBuilders.put(def.class, + new PainlessClassBuilder(DEF_CLASS_NAME, Object.class, org.objectweb.asm.Type.getType(Object.class))); } private Class canonicalTypeNameToType(String canonicalTypeName) { @@ -141,7 +148,7 @@ public class PainlessLookupBuilder { } private void validateType(Class type) { - PainlessLookupUtility.validateType(type, classesToPainlessClasses.keySet()); + PainlessLookupUtility.validateType(type, classesToPainlessClassBuilders.keySet()); } public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { @@ -174,10 +181,10 @@ public class PainlessLookupBuilder { Objects.requireNonNull(clazz); if (clazz == def.class) { - throw new IllegalArgumentException("cannot add reserved class [" + DEF_TYPE_NAME + "]"); + throw new IllegalArgumentException("cannot add reserved class [" + DEF_CLASS_NAME + "]"); } - String canonicalClassName = clazz.getCanonicalName(); + String canonicalClassName = typeToCanonicalTypeName(clazz); if (clazz.isArray()) { throw new IllegalArgumentException("cannot add array type [" + canonicalClassName + "] 
as a class"); @@ -187,13 +194,14 @@ public class PainlessLookupBuilder { throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); } - PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClasses.get(clazz); + PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClassBuilders.get(clazz); if (existingPainlessClassBuilder == null) { - PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder(canonicalClassName, clazz, Type.getType(clazz)); + PainlessClassBuilder painlessClassBuilder = + new PainlessClassBuilder(canonicalClassName, clazz, org.objectweb.asm.Type.getType(clazz)); canonicalClassNamesToClasses.put(canonicalClassName, clazz); - classesToPainlessClasses.put(clazz, painlessClassBuilder); + classesToPainlessClassBuilders.put(clazz, painlessClassBuilder); } else if (existingPainlessClassBuilder.clazz.equals(clazz) == false) { throw new IllegalArgumentException("class [" + canonicalClassName + "] " + "cannot represent multiple java classes with the same name from different class loaders"); @@ -207,308 +215,459 @@ public class PainlessLookupBuilder { throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); } } else { - Class importedPainlessType = canonicalClassNamesToClasses.get(importedCanonicalClassName); + Class importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); - if (importedPainlessType == null) { + if (importedPainlessClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { - throw new IllegalArgumentException( - "inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedPainlessType.equals(clazz) == false) { - throw new 
IllegalArgumentException("painless type [" + importedCanonicalClassName + "] illegally represents multiple " + - "java types [" + clazz.getCanonicalName() + "] and [" + importedPainlessType.getCanonicalName() + "]"); + } else if (importedPainlessClass.equals(clazz) == false) { + throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); } else if (importClassName == false) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for painless type [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); } } } - private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); + public void addPainlessConstructor(String targetCanonicalClassName, List typeNameParameters) { + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(typeNameParameters); - if (ownerStruct == null) { - throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + - "parameters " + whitelistConstructor.painlessParameterTypeNames); + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + + if (targetClass == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + + "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]"); } - List> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size()); - Class[] javaClassParameters = new Class[whitelistConstructor.painlessParameterTypeNames.size()]; - - for (int parameterCount = 0; parameterCount < 
whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) { - String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount); + List> typeParameters = new ArrayList<>(typeNameParameters.size()); + for (String typeNameParameter : typeNameParameters) { try { - Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); - - painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount] = PainlessLookupUtility.typeToJavaType(painlessParameterClass); + Class typeParameter = canonicalTypeNameToType(typeNameParameter); + typeParameters.add(typeParameter); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " + - "with owner struct [" + ownerStructName + "] and constructor parameters " + - whitelistConstructor.painlessParameterTypeNames, iae); + throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + + "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]", iae); } } - java.lang.reflect.Constructor javaConstructor; + addPainlessConstructor(targetClass, typeParameters); + } - try { - javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters); - } catch (NoSuchMethodException exception) { - throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " + - " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception); + public void addPainlessConstructor(Class targetClass, List> typeParameters) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(typeParameters); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add constructor to reserved class [" + DEF_CLASS_NAME + "]"); } - String painlessMethodKey = buildPainlessMethodKey("", 
whitelistConstructor.painlessParameterTypeNames.size()); - PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); + String targetCanonicalClassName = targetClass.getCanonicalName(); + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + + "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + int typeParametersSize = typeParameters.size(); + List> javaTypeParameters = new ArrayList<>(typeParametersSize); + + for (Class typeParameter : typeParameters) { + try { + validateType(typeParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + + "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + } + + javaTypeParameters.add(typeToJavaType(typeParameter)); + } + + Constructor javaConstructor; + + try { + javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("constructor reflection object " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + } + + String painlessMethodKey = buildPainlessMethodKey(CONSTRUCTOR_NAME, typeParametersSize); + PainlessMethod painlessConstructor = painlessClassBuilder.constructors.get(painlessMethodKey); if (painlessConstructor == null) { org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor); - MethodHandle javaHandle; + MethodHandle methodHandle; try { - javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor); - } catch 
(IllegalAccessException exception) { - throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " + - " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames); + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("constructor method handle " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); } painlessConstructor = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(ownerStruct.clazz, "", painlessParametersTypes), - key -> new PainlessMethod("", ownerStruct.clazz, null, void.class, painlessParametersTypes, - asmConstructor, javaConstructor.getModifiers(), javaHandle)); - ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); - } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){ - throw new IllegalArgumentException( - "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " + - "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments); + new PainlessMethodCacheKey(targetClass, CONSTRUCTOR_NAME, typeParameters), + key -> new PainlessMethod(CONSTRUCTOR_NAME, targetClass, null, void.class, typeParameters, + asmConstructor, javaConstructor.getModifiers(), methodHandle) + ); + + painlessClassBuilder.constructors.put(painlessMethodKey, painlessConstructor); + } else if (painlessConstructor.arguments.equals(typeParameters) == false){ + throw new IllegalArgumentException("cannot have constructors " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.arguments) + "] " + + "with the same arity and different type parameters"); } } - private 
void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); + public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName, + String methodName, String returnCanonicalTypeName, List typeNameParameters) { - if (ownerStruct == null) { - throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + - "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); + Objects.requireNonNull(classLoader); + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnCanonicalTypeName); + Objects.requireNonNull(typeNameParameters); + + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + + if (targetClass == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]"); } - if (METHOD_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) { - throw new IllegalArgumentException("invalid method name" + - " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "]."); - } + Class augmentedClass = null; - Class javaAugmentedClass; - - if (whitelistMethod.javaAugmentedClassName != null) { + if (augmentedCanonicalClassName != null) { try { - javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader); + augmentedClass = Class.forName(augmentedCanonicalClassName, true, classLoader); } catch (ClassNotFoundException cnfe) { - throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " + - "not found for 
method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe); + throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", cnfe); } - } else { - javaAugmentedClass = null; } - int augmentedOffset = javaAugmentedClass == null ? 0 : 1; - - List> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size()); - Class[] javaClassParameters = new Class[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset]; - - if (javaAugmentedClass != null) { - javaClassParameters[0] = ownerStruct.clazz; - } - - for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) { - String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount); + List> typeParameters = new ArrayList<>(typeNameParameters.size()); + for (String typeNameParameter : typeNameParameters) { try { - Class painlessParameterClass = canonicalTypeNameToType(painlessParameterTypeName); - - painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount + augmentedOffset] = - PainlessLookupUtility.typeToJavaType(painlessParameterClass); + Class typeParameter = canonicalTypeNameToType(typeNameParameter); + typeParameters.add(typeParameter); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " + - "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); + throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + 
typeNameParameters + "]", iae); } } - Class javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass; - java.lang.reflect.Method javaMethod; + Class returnType; try { - javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters); - } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" + - javaImplClass.getName() + "]", nsme); - } - - Class painlessReturnClass; - - try { - painlessReturnClass = canonicalTypeNameToType(whitelistMethod.painlessReturnTypeName); + returnType = canonicalTypeNameToType(returnCanonicalTypeName); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " + - "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); + throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae); } - if (javaMethod.getReturnType() != PainlessLookupUtility.typeToJavaType(painlessReturnClass)) { - throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " + - "does not match the return type class [" + javaMethod.getReturnType() + "] for the " + - "method with name [" + whitelistMethod.javaMethodName + "] " + - "and parameters " + whitelistMethod.painlessParameterTypeNames); + addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters); + } + + public void addPainlessMethod(Class targetClass, Class augmentedClass, String methodName, + Class returnType, List> typeParameters) { + 
Objects.requireNonNull(targetClass); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnType); + Objects.requireNonNull(typeParameters); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add method to reserved class [" + DEF_CLASS_NAME + "]"); } - String painlessMethodKey = - buildPainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); - if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { - PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); + if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + throw new IllegalArgumentException( + "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."); + } - if (painlessMethod == null) { - org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); - MethodHandle javaMethodHandle; + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); - try { - javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod); - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("method handle not found for method with name " + - "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); - } + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } - painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, 
ownerStruct.clazz, null, painlessReturnClass, - painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); - ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); - } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass && - painlessMethod.arguments.equals(painlessParametersTypes)) == false) { - throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " + - "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " + - "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " + - "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments); + int typeParametersSize = typeParameters.size(); + int augmentedParameterOffset = augmentedClass == null ? 0 : 1; + List> javaTypeParameters = new ArrayList<>(typeParametersSize + augmentedParameterOffset); + + if (augmentedClass != null) { + javaTypeParameters.add(targetClass); + } + + for (Class typeParameter : typeParameters) { + try { + validateType(typeParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + + "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]", iae); + } + + javaTypeParameters.add(typeToJavaType(typeParameter)); + } + + try { + validateType(returnType); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + } + + Method javaMethod; + + if (augmentedClass == null) { + try { + javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new 
Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); } } else { - PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey); + try { + javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + + "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme); + } + } + + if (javaMethod.getReturnType() != typeToJavaType(returnType)) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + + "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + + "for method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + + if (augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { + PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); MethodHandle javaMethodHandle; try { - javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod); - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("method handle not found for method with name " + - "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames); + 
javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("static method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); } painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(ownerStruct.clazz, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass, - painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); - ownerStruct.methods.put(painlessMethodKey, painlessMethod); - } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) && - painlessMethod.arguments.equals(painlessParametersTypes)) == false) { - throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " + - "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " + - "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " + - "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments); + new PainlessMethodCacheKey(targetClass, methodName, typeParameters), + key -> new PainlessMethod(methodName, targetClass, null, returnType, + typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + + painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod); + } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType && + painlessMethod.arguments.equals(typeParameters)) == false) { + throw new IllegalArgumentException("cannot have static methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + 
typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " + + typesToCanonicalTypeNames(painlessMethod.arguments) + "] " + + "with the same arity and different return type or type parameters"); + } + } else { + PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); + + if (painlessMethod == null) { + org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); + MethodHandle javaMethodHandle; + + if (augmentedClass == null) { + try { + javaMethodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); + } + } else { + try { + javaMethodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + + "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); + } + } + + painlessMethod = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(targetClass, methodName, typeParameters), + key -> new PainlessMethod(methodName, targetClass, augmentedClass, returnType, + typeParameters, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + + painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod); + } else if ((painlessMethod.name.equals(methodName) && painlessMethod.rtn == returnType && + painlessMethod.arguments.equals(typeParameters)) == false) { + throw new IllegalArgumentException("cannot have methods " + + "[[" + 
targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(painlessMethod.rtn) + "], " + + typesToCanonicalTypeNames(painlessMethod.arguments) + "] " + + "with the same arity and different return type or type parameters"); } } } - private void addField(String ownerStructName, WhitelistField whitelistField) { - PainlessClassBuilder ownerStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(ownerStructName)); + public void addPainlessField(String targetCanonicalClassName, String fieldName, String typeNameParameter) { + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(fieldName); + Objects.requireNonNull(typeNameParameter); - if (ownerStruct == null) { - throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + - "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName); + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + + if (targetClass == null) { + throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); } - if (FIELD_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) { - throw new IllegalArgumentException("invalid field name " + - "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "]."); - } - - java.lang.reflect.Field javaField; + Class typeParameter; try { - javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName); - } catch (NoSuchFieldException exception) { - throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " + - "not found for class [" + ownerStruct.clazz.getName() + "]."); - } - - Class painlessFieldClass; - - try { - painlessFieldClass = 
canonicalTypeNameToType(whitelistField.painlessFieldTypeName); + typeParameter = canonicalTypeNameToType(typeNameParameter); } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " + - "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae); + throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); } + + addPainlessField(targetClass, fieldName, typeParameter); + } + + public void addPainlessField(Class targetClass, String fieldName, Class typeParameter) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(fieldName); + Objects.requireNonNull(typeParameter); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add field to reserved class [" + DEF_CLASS_NAME + "]"); + } + + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); + + if (FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) { + throw new IllegalArgumentException( + "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "]."); + } + + + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + } + + try { + validateType(typeParameter); + } catch (IllegalArgumentException iae) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", iae); + } + + Field javaField; + + try { + javaField = targetClass.getField(fieldName); + } catch (NoSuchFieldException nsme) { + throw new IllegalArgumentException( + "field reflection object [[" + 
targetCanonicalClassName + "], [" + fieldName + "] not found", nsme); + } + + if (javaField.getType() != typeToJavaType(typeParameter)) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(javaField.getType()) + "] " + + "does not match the specified type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); + } + + String painlessFieldKey = buildPainlessFieldKey(fieldName); + if (Modifier.isStatic(javaField.getModifiers())) { if (Modifier.isFinal(javaField.getModifiers()) == false) { - throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " + - "with owner struct [" + ownerStruct.name + "] is not final"); + throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "]. [" + fieldName + "]] must be final"); } - PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName); + PainlessField painlessField = painlessClassBuilder.staticMembers.get(painlessFieldKey); if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), - key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null)); - ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); - } else if (painlessField.clazz != painlessFieldClass) { - throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " + - "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]"); + new PainlessFieldCacheKey(targetClass, fieldName, typeParameter), + key -> new PainlessField(fieldName, javaField.getName(), targetClass, + typeParameter, javaField.getModifiers(), null, null)); + + 
painlessClassBuilder.staticMembers.put(painlessFieldKey, painlessField); + } else if (painlessField.clazz != typeParameter) { + throw new IllegalArgumentException("cannot have static fields " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + + typeToCanonicalTypeName(typeParameter) + "] and " + + "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " + + typeToCanonicalTypeName(painlessField.clazz) + "] " + + "with the same and different type parameters"); } } else { - MethodHandle javaMethodHandleGetter; - MethodHandle javaMethodHandleSetter; + MethodHandle methodHandleGetter; try { - if (Modifier.isStatic(javaField.getModifiers()) == false) { - javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); - javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField); - } else { - javaMethodHandleGetter = null; - javaMethodHandleSetter = null; - } - } catch (IllegalAccessException exception) { - throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" + - " not found for class [" + ownerStruct.clazz.getName() + "]."); + methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException( + "method handle getter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName); + MethodHandle methodHandleSetter; + + try { + methodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException( + "method handle setter not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); + } + + PainlessField painlessField = painlessClassBuilder.members.get(painlessFieldKey); if (painlessField == null) { painlessField = painlessFieldCache.computeIfAbsent( - new 
PainlessFieldCacheKey(ownerStruct.clazz, whitelistField.javaFieldName, painlessFieldClass), - key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); - ownerStruct.members.put(whitelistField.javaFieldName, painlessField); - } else if (painlessField.clazz != painlessFieldClass) { - throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " + - "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]"); + new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter), + key -> new PainlessField(fieldName, javaField.getName(), targetClass, + typeParameter, javaField.getModifiers(), methodHandleGetter, methodHandleSetter)); + + painlessClassBuilder.members.put(fieldName, painlessField); + } else if (painlessField.clazz != typeParameter) { + throw new IllegalArgumentException("cannot have fields " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + + typeToCanonicalTypeName(typeParameter) + "] and " + + "[[" + targetCanonicalClassName + "], [" + painlessField.name + "], " + + typeToCanonicalTypeName(painlessField.clazz) + "] " + + "with the same and different type parameters"); } } } private void copyStruct(String struct, List children) { - final PainlessClassBuilder owner = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(struct)); + final PainlessClassBuilder owner = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); @@ -516,7 +675,7 @@ public class PainlessLookupBuilder { for (int count = 0; count < children.size(); ++count) { final PainlessClassBuilder child = - classesToPainlessClasses.get(canonicalClassNamesToClasses.get(children.get(count))); + 
classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -690,7 +849,7 @@ public class PainlessLookupBuilder { for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); PainlessClassBuilder painlessStruct = - classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); + classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + @@ -701,8 +860,8 @@ public class PainlessLookupBuilder { addPainlessClass( whitelist.javaClassLoader, whitelistStruct.javaClassName, whitelistStruct.onlyFQNJavaClassName == false); - painlessStruct = classesToPainlessClasses.get(canonicalClassNamesToClasses.get(painlessTypeName)); - classesToPainlessClasses.put(painlessStruct.clazz, painlessStruct); + painlessStruct = classesToPainlessClassBuilders.get(canonicalClassNamesToClasses.get(painlessTypeName)); + classesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); } } @@ -715,17 +874,19 @@ public class PainlessLookupBuilder { for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { origin = whitelistConstructor.origin; - addConstructor(painlessTypeName, whitelistConstructor); + addPainlessConstructor(painlessTypeName, whitelistConstructor.painlessParameterTypeNames); } for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { origin = whitelistMethod.origin; - addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); + addPainlessMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod.javaAugmentedClassName, + 
whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName, + whitelistMethod.painlessParameterTypeNames); } for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { origin = whitelistField.origin; - addField(painlessTypeName, whitelistField); + addPainlessField(painlessTypeName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName); } } } @@ -735,8 +896,8 @@ public class PainlessLookupBuilder { // goes through each Painless struct and determines the inheritance list, // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : classesToPainlessClasses.keySet()) { - PainlessClassBuilder painlessStruct = classesToPainlessClasses.get(javaClass); + for (Class javaClass : classesToPainlessClassBuilders.keySet()) { + PainlessClassBuilder painlessStruct = classesToPainlessClassBuilders.get(javaClass); List painlessSuperStructs = new ArrayList<>(); Class javaSuperClass = painlessStruct.clazz.getSuperclass(); @@ -747,7 +908,7 @@ public class PainlessLookupBuilder { // adds super classes to the inheritance list if (javaSuperClass != null && javaSuperClass.isInterface() == false) { while (javaSuperClass != null) { - PainlessClassBuilder painlessSuperStruct = classesToPainlessClasses.get(javaSuperClass); + PainlessClassBuilder painlessSuperStruct = classesToPainlessClassBuilders.get(javaSuperClass); if (painlessSuperStruct != null) { painlessSuperStructs.add(painlessSuperStruct.name); @@ -763,7 +924,7 @@ public class PainlessLookupBuilder { Class javaInterfaceLookup = javaInteraceLookups.pop(); for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClasses.get(javaSuperInterface); + PainlessClassBuilder painlessInterfaceStruct = classesToPainlessClassBuilders.get(javaSuperInterface); if (painlessInterfaceStruct != null) { String painlessInterfaceStructName = painlessInterfaceStruct.name; @@ -784,7 +945,7 @@ public class 
PainlessLookupBuilder { // copies methods and fields from Object into interface types if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - PainlessClassBuilder painlessObjectStruct = classesToPainlessClasses.get(Object.class); + PainlessClassBuilder painlessObjectStruct = classesToPainlessClassBuilders.get(Object.class); if (painlessObjectStruct != null) { copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); @@ -793,14 +954,14 @@ public class PainlessLookupBuilder { } // precompute runtime classes - for (PainlessClassBuilder painlessStruct : classesToPainlessClasses.values()) { + for (PainlessClassBuilder painlessStruct : classesToPainlessClassBuilders.values()) { addRuntimeClass(painlessStruct); } Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,PainlessClassBuilder> entry : classesToPainlessClasses.entrySet()) { + for (Map.Entry,PainlessClassBuilder> entry : classesToPainlessClassBuilders.entrySet()) { entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue()); javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 1f698b7c673..86d3f876638 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -33,10 +33,12 @@ import java.util.Objects; * * A class is a set of methods and fields under a specific class name. A type is either a class or an array under a specific type name. 
* Note the distinction between class versus type is class means that no array classes will be be represented whereas type allows array - * classes to be represented. The set of available classes will always be a subset of the available types. + * classes to be represented. The set of available classes will always be a subset of the available types. * * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless. If the variable value is the same for asm, - * java, and painless, no prefix is used. + * java, and painless, no prefix is used. Target is used as a prefix to represent if a constructor, method, or field is being + * called/accessed on that specific class. Parameter is often a postfix used to represent if a type is used as a parameter to a + * constructor, method, or field. * *