From 7517c50698961c573dd0074db404d4f5f51e0b81 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Tue, 27 Sep 2016 10:39:21 +0200 Subject: [PATCH 01/50] Update to Tika 1.14 Closes #20390. --- plugins/ingest-attachment/build.gradle | 4 +++- plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 | 1 - plugins/ingest-attachment/licenses/tika-core-1.14.jar.sha1 | 1 + plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 | 1 - plugins/ingest-attachment/licenses/tika-parsers-1.14.jar.sha1 | 1 + 5 files changed, 5 insertions(+), 3 deletions(-) delete mode 100644 plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/tika-core-1.14.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/tika-parsers-1.14.jar.sha1 diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index f22e0fdfeae..c80a8a1db59 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'tika': '1.13', + 'tika': '1.14', 'pdfbox': '2.0.3', 'bouncycastle': '1.55', 'poi': '3.15' @@ -526,7 +526,9 @@ thirdPartyAudit.excludes = [ 'org.apache.http.StatusLine', 'org.apache.http.client.HttpClient', 'org.apache.http.client.methods.HttpGet', + 'org.apache.http.client.methods.HttpPost', 'org.apache.http.client.utils.URIBuilder', + 'org.apache.http.entity.ByteArrayEntity', 'org.apache.http.impl.client.DefaultHttpClient', 'org.apache.james.mime4j.MimeException', 'org.apache.james.mime4j.codec.DecodeMonitor', diff --git a/plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 deleted file mode 100644 index cfc36a450bd..00000000000 --- a/plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1305c798d41d1d7bbf12cb7c0ca184c98eed25ad \ No newline at 
end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.14.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.14.jar.sha1 new file mode 100644 index 00000000000..2da2c07d9b2 --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-core-1.14.jar.sha1 @@ -0,0 +1 @@ +afff8f1774994aa973ef90bc8d38ddf089b9d6d9 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 deleted file mode 100644 index 7fb2755d545..00000000000 --- a/plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -374fde67b9d35f785534b0e6c4953533c31bab5f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.14.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.14.jar.sha1 new file mode 100644 index 00000000000..b96b3cdefdd --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-parsers-1.14.jar.sha1 @@ -0,0 +1 @@ +d26c10a9e7d116366562aa260013a30a55ff4e8f \ No newline at end of file From 914664d89abc5036435a570cb125af66c0d6c6fc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 Nov 2016 08:22:15 -0800 Subject: [PATCH 02/50] Fix leftover reference to ScriptModule in native script docs --- docs/reference/modules/scripting/native.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/modules/scripting/native.asciidoc b/docs/reference/modules/scripting/native.asciidoc index 37a2eac18cc..ed81f44889b 100644 --- a/docs/reference/modules/scripting/native.asciidoc +++ b/docs/reference/modules/scripting/native.asciidoc @@ -13,8 +13,8 @@ to construct the script. The actual script will extend either `AbstractExecutableScript` or `AbstractSearchScript`. The second one is likely the most useful and has several helpful subclasses you can extend like `AbstractLongSearchScript` and `AbstractDoubleSearchScript`. 
-Finally, your plugin should register the native -script by declaring the `onModule(ScriptModule)` method. +Finally, your plugin should register the native script by implementing the +`ScriptPlugin` interface. If you squashed the whole thing into one class it'd look like: From 09fbb4d06d5576d8582715d622368e47b530ae46 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 17 Nov 2016 18:45:34 +0100 Subject: [PATCH 03/50] Fix match_phrase_prefix on boosted fields (#21623) This change fixes the match_phrase_prefix on fields that define a boost in their mapping. Fixes #21613 --- .../index/search/MatchQuery.java | 32 ++++++++++------- .../index/query/MatchQueryBuilderTests.java | 36 +++++++++++++++++++ 2 files changed, 56 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java index cf8be75306f..46eb6b7d399 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -25,6 +25,7 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; @@ -303,31 +304,38 @@ public class MatchQuery { public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) { final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop); + float boost = 1; + Query innerQuery = query; + while (innerQuery instanceof BoostQuery) { + BoostQuery bq = (BoostQuery) innerQuery; + boost *= bq.getBoost(); + innerQuery = bq.getQuery(); + } final MultiPhrasePrefixQuery prefixQuery = new 
MultiPhrasePrefixQuery(); prefixQuery.setMaxExpansions(maxExpansions); prefixQuery.setSlop(phraseSlop); - if (query instanceof PhraseQuery) { - PhraseQuery pq = (PhraseQuery)query; + if (innerQuery instanceof PhraseQuery) { + PhraseQuery pq = (PhraseQuery) innerQuery; Term[] terms = pq.getTerms(); int[] positions = pq.getPositions(); for (int i = 0; i < terms.length; i++) { prefixQuery.add(new Term[] {terms[i]}, positions[i]); } - return prefixQuery; - } else if (query instanceof MultiPhraseQuery) { - MultiPhraseQuery pq = (MultiPhraseQuery)query; + return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); + } else if (innerQuery instanceof MultiPhraseQuery) { + MultiPhraseQuery pq = (MultiPhraseQuery) innerQuery; Term[][] terms = pq.getTermArrays(); int[] positions = pq.getPositions(); for (int i = 0; i < terms.length; i++) { prefixQuery.add(terms[i], positions[i]); } - return prefixQuery; - } else if (query instanceof TermQuery) { - prefixQuery.add(((TermQuery) query).getTerm()); - return prefixQuery; - } else if (query instanceof AllTermQuery) { - prefixQuery.add(((AllTermQuery) query).getTerm()); - return prefixQuery; + return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); + } else if (innerQuery instanceof TermQuery) { + prefixQuery.add(((TermQuery) innerQuery).getTerm()); + return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); + } else if (innerQuery instanceof AllTermQuery) { + prefixQuery.add(((AllTermQuery) innerQuery).getTerm()); + return boost == 1 ? 
prefixQuery : new BoostQuery(prefixQuery, boost); } return query; } diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index a4e202e2304..00d41aa754e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -29,11 +30,15 @@ import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.search.MatchQuery.Type; import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; @@ -458,4 +463,35 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase parseQuery(json2)); } + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + mapperService.merge("t_boost", new 
CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("t_boost", + "string_boost", "type=text,boost=4").string()), MapperService.MergeReason.MAPPING_UPDATE, false); + } + + public void testMatchPhrasePrefixWithBoost() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + QueryShardContext context = createShardContext(); + assumeTrue("test runs only when the index version is on or after V_5_0_0_alpha1", + context.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)); + + { + // field boost is applied on a single term query + MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo"); + Query query = builder.toQuery(context); + assertThat(query, instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) query).getBoost(), equalTo(4f)); + Query innerQuery = ((BoostQuery) query).getQuery(); + assertThat(innerQuery, instanceOf(MultiPhrasePrefixQuery.class)); + } + + { + // field boost is ignored on phrase query + MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo bar"); + Query query = builder.toQuery(context); + assertThat(query, instanceOf(MultiPhrasePrefixQuery.class)); + } + + } } From 2a1e08f76ac8200b55110864afbb72b4aff43e0d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 17 Nov 2016 12:54:57 -0500 Subject: [PATCH 04/50] Fix compilation in Eclipse (#21606) * Fix compilation in Eclipse I'm not sure what the bug is, but ecj doesn't like this expression unless the type is set explicitly. 
* Add comment explaining why no diamond operator --- .../action/support/replication/TransportReplicationAction.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 024672922fa..0f0a9ae2c5d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -1018,7 +1018,8 @@ public abstract class TransportReplicationAction< } transportService.sendRequest(node, transportReplicaAction, new ConcreteShardRequest<>(request, replica.allocationId().getId()), transportOptions, - new ActionListenerResponseHandler<>(listener, ReplicaResponse::new)); + // Eclipse can't handle when this is <> so we specify the type here. + new ActionListenerResponseHandler(listener, ReplicaResponse::new)); } @Override From b8cae39b7cb314a76235d5395045f71af55d64b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 1 Nov 2016 14:22:14 +0100 Subject: [PATCH 05/50] Using ObjectParser in MatchAllQueryBuilder and IdsQueryBuilder A first step moving away from the current parsing to use the generalized Objectparser and ConstructingObjectParser. This PR start by making use of it in MatchAllQueryBuilder and IdsQueryBuilder. 
--- .../index/query/AbstractQueryBuilder.java | 10 +++ .../index/query/IdsQueryBuilder.java | 90 ++++--------------- .../index/query/MatchAllQueryBuilder.java | 43 +++------ .../index/query/IdsQueryBuilderTests.java | 66 +++++++++----- 4 files changed, 83 insertions(+), 126 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 3ed3b80fb84..47beb64cb11 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -26,10 +26,12 @@ import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcherSupplier; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentType; @@ -300,4 +302,12 @@ public abstract class AbstractQueryBuilder> + processedFieldName + "] and [" + currentFieldName + "]"); } } + + /** + * Adds 'boost' and 'query_name' parsing to all query builder parsers passed in + */ + protected static void declareStandardFields(AbstractObjectParser parser) { + parser.declareFloat((builder, value) -> builder.boost(value), AbstractQueryBuilder.BOOST_FIELD); + parser.declareString((builder, value) -> builder.queryName(value), AbstractQueryBuilder.NAME_FIELD); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java 
b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index c8f9f55f96e..0b41a7277af 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -27,13 +27,12 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -43,6 +42,8 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + /** * A query that will return only documents matching specific ids (and a type). 
*/ @@ -56,13 +57,6 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { private final String[] types; - /** - * Creates a new IdsQueryBuilder without providing the types of the documents to look for - */ - public IdsQueryBuilder() { - this.types = new String[0]; - } - /** * Creates a new IdsQueryBuilder by providing the types of the documents to look for */ @@ -126,71 +120,23 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { builder.endObject(); } - public static Optional fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - List ids = new ArrayList<>(); - List types = new ArrayList<>(); - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - String queryName = null; + @SuppressWarnings("unchecked") + private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + a -> new IdsQueryBuilder(((List) a[0]).toArray(new String[0]))); - String currentFieldName = null; - XContentParser.Token token; - boolean idsProvided = false; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_ARRAY) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUES_FIELD)) { - idsProvided = true; - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if ((token == XContentParser.Token.VALUE_STRING) || - (token == XContentParser.Token.VALUE_NUMBER)) { - String id = parser.textOrNull(); - if (id == null) { - throw new ParsingException(parser.getTokenLocation(), "No value specified for term filter"); - } - ids.add(id); - } else { - throw new ParsingException(parser.getTokenLocation(), - "Illegal value for id, expecting a string or number, got: " + token); - } - } - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) { - while ((token = parser.nextToken()) != 
XContentParser.Token.END_ARRAY) { - String value = parser.textOrNull(); - if (value == null) { - throw new ParsingException(parser.getTokenLocation(), "No type specified for term filter"); - } - types.add(value); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + - "] query does not support [" + currentFieldName + "]"); - } - } else if (token.isValue()) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) { - types = Collections.singletonList(parser.text()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { - boost = parser.floatValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { - queryName = parser.text(); - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + - "] query does not support [" + currentFieldName + "]"); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + - "] unknown token [" + token + "] after [" + currentFieldName + "]"); - } - } - if (!idsProvided) { - throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query, no ids values provided"); - } + static { + PARSER.declareStringArray(constructorArg(), IdsQueryBuilder.TYPE_FIELD); + PARSER.declareStringArray((builder, values) -> builder.addIds(values.toArray(new String[values.size()])), + IdsQueryBuilder.VALUES_FIELD); + declareStandardFields(PARSER); + } - IdsQueryBuilder query = new IdsQueryBuilder(types.toArray(new String[types.size()])); - query.addIds(ids.toArray(new String[ids.size()])); - query.boost(boost).queryName(queryName); - return Optional.of(query); + public static Optional fromXContent(QueryParseContext context) { + try { + return Optional.of(PARSER.apply(context.parser(), context)); + } catch (IllegalArgumentException e) { + throw new 
ParsingException(context.parser().getTokenLocation(), e.getMessage(), e); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java index ba704809a4f..0796fb662a6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java @@ -20,13 +20,12 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Optional; @@ -48,7 +47,7 @@ public class MatchAllQueryBuilder extends AbstractQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); + private static ObjectParser PARSER = new ObjectParser<>(NAME, MatchAllQueryBuilder::new); - String currentFieldName = null; - XContentParser.Token token; - String queryName = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - while (((token = parser.nextToken()) != XContentParser.Token.END_OBJECT && token != XContentParser.Token.END_ARRAY)) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { - queryName = parser.text(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { - boost = parser.floatValue(); - } else { - throw 
new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME + - "] query does not support [" + currentFieldName + "]"); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME + - "] unknown token [" + token + "] after [" + currentFieldName + "]"); - } + static { + declareStandardFields(PARSER); + } + + public static Optional fromXContent(QueryParseContext context) { + try { + return Optional.of(PARSER.apply(context.parser(), context)); + } catch (IllegalArgumentException e) { + throw new ParsingException(context.parser().getTokenLocation(), e.getMessage(), e); } - MatchAllQueryBuilder queryBuilder = new MatchAllQueryBuilder(); - queryBuilder.boost(boost); - queryBuilder.queryName(queryName); - return Optional.of(queryBuilder); } @Override - protected Query doToQuery(QueryShardContext context) throws IOException { + protected Query doToQuery(QueryShardContext context) { return Queries.newMatchAllQuery(); } diff --git a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index 2ad557dcc76..ee22db7fb07 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -32,17 +32,9 @@ import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.contains; public class IdsQueryBuilderTests extends AbstractQueryTestCase { - /** - * Check that parser throws exception on missing values field. 
- */ - public void testIdsNotProvided() throws IOException { - String noIdsFieldQuery = "{\"ids\" : { \"type\" : \"my_type\" }"; - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(noIdsFieldQuery)); - assertThat(e.getMessage(), containsString("no ids values provided")); - } @Override protected IdsQueryBuilder doCreateTestQueryBuilder() { @@ -102,7 +94,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase public void testIdsQueryWithInvalidValues() throws Exception { String query = "{ \"ids\": { \"values\": [[1]] } }"; ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(query)); - assertEquals("Illegal value for id, expecting a string or number, got: START_ARRAY", e.getMessage()); + assertEquals("[ids] failed to parse field [values]", e.getMessage()); } public void testFromJson() throws IOException { @@ -116,44 +108,70 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase "}"; IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(json); checkGeneratedJson(json, parsed); - assertEquals(json, 3, parsed.ids().size()); + assertThat(parsed.ids(), contains("1","100","4")); assertEquals(json, "my_type", parsed.types()[0]); + + // check that type that is not an array and also ids that are numbers are parsed + json = + "{\n" + + " \"ids\" : {\n" + + " \"type\" : \"my_type\",\n" + + " \"values\" : [ 1, 100, 4 ],\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + parsed = (IdsQueryBuilder) parseQuery(json); + assertThat(parsed.ids(), contains("1","100","4")); + assertEquals(json, "my_type", parsed.types()[0]); + + // check with empty type array + json = + "{\n" + + " \"ids\" : {\n" + + " \"type\" : [ ],\n" + + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + parsed = (IdsQueryBuilder) parseQuery(json); + assertThat(parsed.ids(), contains("1","100","4")); + assertEquals(json, 0, parsed.types().length); } public void testFromJsonDeprecatedSyntax() throws 
IOException { - IdsQueryBuilder tempQuery = createTestQueryBuilder(); - assumeTrue("test requires at least one type", tempQuery.types() != null && tempQuery.types().length > 0); - - String type = tempQuery.types()[0]; - IdsQueryBuilder testQuery = new IdsQueryBuilder(type); + IdsQueryBuilder testQuery = new IdsQueryBuilder("my_type"); //single value type can also be called _type final String contentString = "{\n" + " \"ids\" : {\n" + - " \"_type\" : \"" + type + "\",\n" + - " \"values\" : []\n" + + " \"_type\" : \"my_type\",\n" + + " \"values\" : [ ]\n" + " }\n" + "}"; IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(contentString, ParseFieldMatcher.EMPTY); assertEquals(testQuery, parsed); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(contentString)); - assertEquals("Deprecated field [_type] used, expected [type] instead", e.getMessage()); + ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(contentString)); checkWarningHeaders("Deprecated field [_type] used, expected [type] instead"); + assertEquals("Deprecated field [_type] used, expected [type] instead", e.getMessage()); + assertEquals(3, e.getLineNumber()); + assertEquals(19, e.getColumnNumber()); //array of types can also be called type rather than types final String contentString2 = "{\n" + " \"ids\" : {\n" + - " \"types\" : [\"" + type + "\"],\n" + - " \"values\" : []\n" + + " \"types\" : [\"my_type\"],\n" + + " \"values\" : [ ]\n" + " }\n" + "}"; parsed = (IdsQueryBuilder) parseQuery(contentString, ParseFieldMatcher.EMPTY); assertEquals(testQuery, parsed); - e = expectThrows(IllegalArgumentException.class, () -> parseQuery(contentString2)); - assertEquals("Deprecated field [types] used, expected [type] instead", e.getMessage()); + e = expectThrows(ParsingException.class, () -> parseQuery(contentString2)); checkWarningHeaders("Deprecated field [_type] used, expected [type] instead"); + assertEquals("Deprecated field [types] used, 
expected [type] instead", e.getMessage()); + assertEquals(3, e.getLineNumber()); + assertEquals(19, e.getColumnNumber()); } } From 4a7b70cc082953dc7cd09b715916e3d1f8813f74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 16 Nov 2016 11:08:47 +0100 Subject: [PATCH 06/50] Don't require `types` parameter in IdsQueryBuilder constructor According to the docs and our own tests we accept an ids query without specified types and default to all types in the index mapping in this case. This changes the builder to reflect this by making the types no longer a required constructor argument and changes the parser to reflect that. --- .../index/query/AbstractQueryBuilder.java | 5 ++- .../index/query/IdsQueryBuilder.java | 45 ++++++++++++------- .../index/query/QueryBuilders.java | 4 +- .../index/query/IdsQueryBuilderTests.java | 26 ++++++++--- 4 files changed, 54 insertions(+), 26 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 47beb64cb11..e48373a905c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -304,7 +304,10 @@ public abstract class AbstractQueryBuilder> } /** - * Adds 'boost' and 'query_name' parsing to all query builder parsers passed in + * Adds {@code boost} and {@code query_name} parsing to the + * {@link AbstractObjectParser} passed in. All query builders except + * {@link MatchAllQueryBuilder} and {@link MatchNoneQueryBuilder} support these fields so they + * should use this method. 
*/ protected static void declareStandardFields(AbstractObjectParser parser) { parser.declareFloat((builder, value) -> builder.boost(value), AbstractQueryBuilder.BOOST_FIELD); diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index 0b41a7277af..f829c2f45ae 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -24,10 +24,11 @@ import org.apache.lucene.search.Query; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -37,12 +38,11 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; -import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ObjectParser.fromList; /** * A query that will return only documents matching specific ids (and a type). 
@@ -55,16 +55,22 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { private final Set ids = new HashSet<>(); - private final String[] types; + private String[] types = Strings.EMPTY_ARRAY; + + /** + * Creates a new IdsQueryBuilder with no types specified upfront + */ + public IdsQueryBuilder() { + // nothing to do + } /** * Creates a new IdsQueryBuilder by providing the types of the documents to look for + * @deprecated Replaced by {@link #types(String...)} */ + @Deprecated public IdsQueryBuilder(String... types) { - if (types == null) { - throw new IllegalArgumentException("[ids] types cannot be null"); - } - this.types = types; + types(types); } /** @@ -82,6 +88,17 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { out.writeStringArray(ids.toArray(new String[ids.size()])); } + /** + * Add types to query + */ + public IdsQueryBuilder types(String... types) { + if (types == null) { + throw new IllegalArgumentException("[" + NAME + "] types cannot be null"); + } + this.types = types; + return this; + } + /** * Returns the types used in this query */ @@ -94,7 +111,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { */ public IdsQueryBuilder addIds(String... 
ids) { if (ids == null) { - throw new IllegalArgumentException("[ids] ids cannot be null"); + throw new IllegalArgumentException("[" + NAME + "] ids cannot be null"); } Collections.addAll(this.ids, ids); return this; @@ -120,14 +137,12 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { builder.endObject(); } - @SuppressWarnings("unchecked") - private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - a -> new IdsQueryBuilder(((List) a[0]).toArray(new String[0]))); + private static ObjectParser PARSER = new ObjectParser<>(NAME, + () -> new IdsQueryBuilder()); static { - PARSER.declareStringArray(constructorArg(), IdsQueryBuilder.TYPE_FIELD); - PARSER.declareStringArray((builder, values) -> builder.addIds(values.toArray(new String[values.size()])), - IdsQueryBuilder.VALUES_FIELD); + PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::types), IdsQueryBuilder.TYPE_FIELD); + PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::addIds), IdsQueryBuilder.VALUES_FIELD); declareStandardFields(PARSER); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index d63696ec4fb..2765eaa38ca 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -29,12 +29,10 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService; import java.io.IOException; import java.util.Collection; import java.util.List; -import java.util.Map; /** * A static factory for simple "import static" usage. 
@@ -120,7 +118,7 @@ public abstract class QueryBuilders { * @param types The mapping/doc type */ public static IdsQueryBuilder idsQuery(String... types) { - return new IdsQueryBuilder(types); + return new IdsQueryBuilder().types(types); } /** diff --git a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index ee22db7fb07..5913a038661 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -63,7 +63,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase } IdsQueryBuilder query; if (types.length > 0 || randomBoolean()) { - query = new IdsQueryBuilder(types); + query = new IdsQueryBuilder().types(types); query.addIds(ids); } else { query = new IdsQueryBuilder(); @@ -82,11 +82,11 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase } public void testIllegalArguments() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IdsQueryBuilder((String[]) null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IdsQueryBuilder().types((String[]) null)); assertEquals("[ids] types cannot be null", e.getMessage()); IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder(); - e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[])null)); + e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null)); assertEquals("[ids] ids cannot be null", e.getMessage()); } @@ -136,10 +136,22 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase parsed = (IdsQueryBuilder) parseQuery(json); assertThat(parsed.ids(), contains("1","100","4")); assertEquals(json, 0, parsed.types().length); + + // check without type + json = + "{\n" + + " \"ids\" : {\n" + + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + + " 
\"boost\" : 1.0\n" + + " }\n" + + "}"; + parsed = (IdsQueryBuilder) parseQuery(json); + assertThat(parsed.ids(), contains("1","100","4")); + assertEquals(json, 0, parsed.types().length); } public void testFromJsonDeprecatedSyntax() throws IOException { - IdsQueryBuilder testQuery = new IdsQueryBuilder("my_type"); + IdsQueryBuilder testQuery = new IdsQueryBuilder().types("my_type"); //single value type can also be called _type final String contentString = "{\n" + @@ -158,18 +170,18 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase assertEquals(3, e.getLineNumber()); assertEquals(19, e.getColumnNumber()); - //array of types can also be called type rather than types + //array of types can also be called types rather than type final String contentString2 = "{\n" + " \"ids\" : {\n" + " \"types\" : [\"my_type\"],\n" + " \"values\" : [ ]\n" + " }\n" + "}"; - parsed = (IdsQueryBuilder) parseQuery(contentString, ParseFieldMatcher.EMPTY); + parsed = (IdsQueryBuilder) parseQuery(contentString2, ParseFieldMatcher.EMPTY); assertEquals(testQuery, parsed); e = expectThrows(ParsingException.class, () -> parseQuery(contentString2)); - checkWarningHeaders("Deprecated field [_type] used, expected [type] instead"); + checkWarningHeaders("Deprecated field [types] used, expected [type] instead"); assertEquals("Deprecated field [types] used, expected [type] instead", e.getMessage()); assertEquals(3, e.getLineNumber()); assertEquals(19, e.getColumnNumber()); From ced433e9a83a6958b34e6216dcb92a1d873ee631 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 17 Nov 2016 13:39:08 -0800 Subject: [PATCH 07/50] Fix reserved variable availability in lambdas in Painless --- .../org/elasticsearch/painless/Locals.java | 77 ++++++++++------- .../painless/node/SFunction.java | 7 +- .../elasticsearch/painless/node/SSource.java | 2 +- .../elasticsearch/painless/LambdaTests.java | 83 ++++++++++++------- .../test/plan_a/20_scriptfield.yaml | 25 ++++++ 5 files changed, 128 
insertions(+), 66 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index b02ea085904..5588c943dd2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -36,7 +36,7 @@ import java.util.Set; * Tracks user defined methods and variables across compilation phases. */ public final class Locals { - + /** Reserved word: params map parameter */ public static final String PARAMS = "params"; /** Reserved word: Lucene scorer parameter */ @@ -53,25 +53,35 @@ public final class Locals { public static final String THIS = "#this"; /** Reserved word: unused */ public static final String DOC = "doc"; - - /** Map of always reserved keywords */ - public static final Set KEYWORDS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( - THIS,PARAMS,SCORER,DOC,VALUE,SCORE,CTX,LOOP + + /** Map of always reserved keywords for the main scope */ + public static final Set MAIN_KEYWORDS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + THIS,PARAMS,SCORER,DOC,VALUE,SCORE,CTX,LOOP ))); - + + /** Map of always reserved keywords for a function scope */ + public static final Set FUNCTION_KEYWORDS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + THIS,LOOP + ))); + + /** Map of always reserved keywords for a lambda scope */ + public static final Set LAMBDA_KEYWORDS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + THIS,LOOP + ))); + /** Creates a new local variable scope (e.g. loop) inside the current scope */ public static Locals newLocalScope(Locals currentScope) { return new Locals(currentScope); } - - /** + + /** * Creates a new lambda scope inside the current scope *

* This is just like {@link #newFunctionScope}, except the captured parameters are made read-only. */ - public static Locals newLambdaScope(Locals programScope, Type returnType, List parameters, + public static Locals newLambdaScope(Locals programScope, Type returnType, List parameters, int captureCount, int maxLoopCounter) { - Locals locals = new Locals(programScope, returnType); + Locals locals = new Locals(programScope, returnType, LAMBDA_KEYWORDS); for (int i = 0; i < parameters.size(); i++) { Parameter parameter = parameters.get(i); // TODO: allow non-captures to be r/w: @@ -87,10 +97,10 @@ public final class Locals { } return locals; } - + /** Creates a new function scope inside the current scope */ public static Locals newFunctionScope(Locals programScope, Type returnType, List parameters, int maxLoopCounter) { - Locals locals = new Locals(programScope, returnType); + Locals locals = new Locals(programScope, returnType, FUNCTION_KEYWORDS); for (Parameter parameter : parameters) { locals.addVariable(parameter.location, parameter.type, parameter.name, false); } @@ -100,10 +110,10 @@ public final class Locals { } return locals; } - + /** Creates a new main method scope */ public static Locals newMainMethodScope(Locals programScope, boolean usesScore, boolean usesCtx, int maxLoopCounter) { - Locals locals = new Locals(programScope, Definition.OBJECT_TYPE); + Locals locals = new Locals(programScope, Definition.OBJECT_TYPE, MAIN_KEYWORDS); // This reference. Internal use only. locals.defineVariable(null, Definition.getType("Object"), THIS, true); @@ -137,16 +147,16 @@ public final class Locals { } return locals; } - + /** Creates a new program scope: the list of methods. 
It is the parent for all methods */ public static Locals newProgramScope(Collection methods) { - Locals locals = new Locals(null, null); + Locals locals = new Locals(null, null, null); for (Method method : methods) { locals.addMethod(method); } return locals; } - + /** Checks if a variable exists or not, in this scope or any parents. */ public boolean hasVariable(String name) { Variable variable = lookupVariable(null, name); @@ -158,7 +168,7 @@ public final class Locals { } return false; } - + /** Accesses a variable. This will throw IAE if the variable does not exist */ public Variable getVariable(Location location, String name) { Variable variable = lookupVariable(location, name); @@ -170,7 +180,7 @@ public final class Locals { } throw location.createError(new IllegalArgumentException("Variable [" + name + "] is not defined.")); } - + /** Looks up a method. Returns null if the method does not exist. */ public Method getMethod(MethodKey key) { Method method = lookupMethod(key); @@ -182,23 +192,23 @@ public final class Locals { } return null; } - + /** Creates a new variable. Throws IAE if the variable has already been defined (even in a parent) or reserved. */ public Variable addVariable(Location location, Type type, String name, boolean readonly) { if (hasVariable(name)) { throw location.createError(new IllegalArgumentException("Variable [" + name + "] is already defined.")); } - if (KEYWORDS.contains(name)) { + if (keywords.contains(name)) { throw location.createError(new IllegalArgumentException("Variable [" + name + "] is reserved.")); } return defineVariable(location, type, name, readonly); } - + /** Return type of this scope (e.g. int, if inside a function that returns int) */ public Type getReturnType() { return returnType; } - + /** Returns the top-level program scope. 
*/ public Locals getProgramScope() { Locals locals = this; @@ -207,13 +217,15 @@ public final class Locals { } return locals; } - + ///// private impl // parent scope private final Locals parent; // return type of this scope private final Type returnType; + // keywords for this scope + private final Set keywords; // next slot number to assign private int nextSlotNumber; // variable name -> variable @@ -225,15 +237,16 @@ public final class Locals { * Create a new Locals */ private Locals(Locals parent) { - this(parent, parent.getReturnType()); + this(parent, parent.returnType, parent.keywords); } - + /** * Create a new Locals with specified return type */ - private Locals(Locals parent, Type returnType) { + private Locals(Locals parent, Type returnType, Set keywords) { this.parent = parent; this.returnType = returnType; + this.keywords = keywords; if (parent == null) { this.nextSlotNumber = 0; } else { @@ -262,7 +275,7 @@ public final class Locals { return methods.get(key); } - + /** Defines a variable at this scope internally. 
*/ private Variable defineVariable(Location location, Type type, String name, boolean readonly) { if (variables == null) { @@ -273,7 +286,7 @@ public final class Locals { nextSlotNumber += type.type.getSize(); return variable; } - + private void addMethod(Method method) { if (methods == null) { methods = new HashMap<>(); @@ -293,7 +306,7 @@ public final class Locals { public final Type type; public final boolean readonly; private final int slot; - + public Variable(Location location, String name, Type type, int slot, boolean readonly) { this.location = location; this.name = name; @@ -301,12 +314,12 @@ public final class Locals { this.slot = slot; this.readonly = readonly; } - + public int getSlot() { return slot; } } - + public static final class Parameter { public final Location location; public final String name; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 44afe828ef2..22c7c6d96b0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -53,9 +53,6 @@ import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; */ public final class SFunction extends AStatement { public static final class FunctionReserved implements Reserved { - public static final String THIS = "#this"; - public static final String LOOP = "#loop"; - private int maxLoopCounter = 0; public void markReserved(String name) { @@ -63,7 +60,7 @@ public final class SFunction extends AStatement { } public boolean isReserved(String name) { - return name.equals(THIS) || name.equals(LOOP); + return Locals.FUNCTION_KEYWORDS.contains(name); } @Override @@ -173,7 +170,7 @@ public final class SFunction extends AStatement { } if (reserved.getMaxLoopCounter() > 0) { - loop = locals.getVariable(null, FunctionReserved.LOOP); + loop = 
locals.getVariable(null, Locals.LOOP); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index a4cf1cc8eee..9d4a74d3cb3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -89,7 +89,7 @@ public final class SSource extends AStatement { @Override public boolean isReserved(String name) { - return Locals.KEYWORDS.contains(name); + return Locals.MAIN_KEYWORDS.contains(name); } public boolean usesScore() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java index 6bb800eb92c..4958942b2a2 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java @@ -19,61 +19,64 @@ package org.elasticsearch.painless; +import java.util.HashMap; +import java.util.Map; + public class LambdaTests extends ScriptTestCase { public void testNoArgLambda() { assertEquals(1, exec("Optional.empty().orElseGet(() -> 1);")); } - + public void testNoArgLambdaDef() { assertEquals(1, exec("def x = Optional.empty(); x.orElseGet(() -> 1);")); } - + public void testLambdaWithArgs() { - assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); " + assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); " + "l.sort((a, b) -> a.length() - b.length()); return l.get(0)")); } - + public void testLambdaWithTypedArgs() { - assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); " + assertEquals("short", exec("List l = new ArrayList(); l.add('looooong'); l.add('short'); " + "l.sort((String a, String b) -> a.length() - 
b.length()); return l.get(0)")); } - + public void testPrimitiveLambdas() { assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> x + 1).sum();")); } - + public void testPrimitiveLambdasWithTypedArgs() { assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(int x -> x + 1).sum();")); } - + public void testPrimitiveLambdasDef() { assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> x + 1).sum();")); } - + public void testPrimitiveLambdasWithTypedArgsDef() { assertEquals(4, exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(int x -> x + 1).sum();")); } - + public void testPrimitiveLambdasConvertible() { assertEquals(2, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(byte x -> x).sum();")); } - + public void testPrimitiveArgs() { assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> x + 1)")); } - + public void testPrimitiveArgsTyped() { assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(int x -> x + 1)")); } - + public void testPrimitiveArgsTypedOddly() { assertEquals(2L, exec("long applyOne(IntFunction arg) { arg.apply(1) } applyOne(long x -> x + 1)")); } @@ -85,7 +88,7 @@ public class LambdaTests extends ScriptTestCase { public void testUnneededCurlyStatements() { assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { x + 1 })")); } - + /** interface ignores return value */ public void testVoidReturn() { assertEquals(2, exec("List list = new ArrayList(); " @@ -94,7 +97,7 @@ public class LambdaTests extends ScriptTestCase { + "list.forEach(x -> list2.add(x));" + "return list[0]")); } - + /** interface ignores return value */ public void testVoidReturnDef() { assertEquals(2, exec("def list = new ArrayList(); " @@ -121,15 +124,15 @@ public class LambdaTests extends 
ScriptTestCase { "}" + "return sum;")); } - + public void testCapture() { assertEquals(5, exec("int x = 5; return Optional.empty().orElseGet(() -> x);")); } - + public void testTwoCaptures() { assertEquals("1test", exec("int x = 1; String y = 'test'; return Optional.empty().orElseGet(() -> x + y);")); } - + public void testCapturesAreReadOnly() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(1); l.add(1); " @@ -137,13 +140,13 @@ public class LambdaTests extends ScriptTestCase { }); assertTrue(expected.getMessage().contains("is read-only")); } - + @AwaitsFix(bugUrl = "def type tracking") public void testOnlyCapturesAreReadOnly() { assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();")); } - + /** Lambda parameters shouldn't be able to mask a variable already in scope */ public void testNoParamMasking() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { @@ -156,31 +159,31 @@ public class LambdaTests extends ScriptTestCase { public void testCaptureDef() { assertEquals(5, exec("int x = 5; def y = Optional.empty(); y.orElseGet(() -> x);")); } - + public void testNestedCapture() { assertEquals(1, exec("boolean x = false; int y = 1;" + "return Optional.empty().orElseGet(() -> x ? 
5 : Optional.empty().orElseGet(() -> y));")); } - + public void testNestedCaptureParams() { assertEquals(2, exec("int foo(Function f) { return f.apply(1) }" + "return foo(x -> foo(y -> x + 1))")); } - + public void testWrongArity() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Optional.empty().orElseGet(x -> x);"); }); assertTrue(expected.getMessage().contains("Incorrect number of parameters")); } - + public void testWrongArityDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(x -> x);"); }); assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters")); } - + public void testWrongArityNotEnough() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(1); l.add(1); " @@ -188,7 +191,7 @@ public class LambdaTests extends ScriptTestCase { }); assertTrue(expected.getMessage().contains("Incorrect number of parameters")); } - + public void testWrongArityNotEnoughDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def l = new ArrayList(); l.add(1); l.add(1); " @@ -196,12 +199,36 @@ public class LambdaTests extends ScriptTestCase { }); assertTrue(expected.getMessage().contains("Incorrect number of parameters")); } - + public void testLambdaInFunction() { assertEquals(5, exec("def foo() { Optional.empty().orElseGet(() -> 5) } return foo();")); } - + public void testLambdaCaptureFunctionParam() { assertEquals(5, exec("def foo(int x) { Optional.empty().orElseGet(() -> x) } return foo(5);")); } + + public void testReservedCapture() { + String compare = "boolean compare(Supplier s, def v) {s.get() == v}"; + assertEquals(true, exec(compare + "compare(() -> new ArrayList(), new ArrayList())")); + assertEquals(true, exec(compare + "compare(() -> { new 
ArrayList() }, new ArrayList())")); + + Map params = new HashMap<>(); + params.put("key", "value"); + params.put("number", 2); + + assertEquals(true, exec(compare + "compare(() -> { return params['key'] }, 'value')", params, true)); + assertEquals(false, exec(compare + "compare(() -> { return params['nokey'] }, 'value')", params, true)); + assertEquals(true, exec(compare + "compare(() -> { return params['nokey'] }, null)", params, true)); + assertEquals(true, exec(compare + "compare(() -> { return params['number'] }, 2)", params, true)); + assertEquals(false, exec(compare + "compare(() -> { return params['number'] }, 'value')", params, true)); + assertEquals(false, exec(compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" + + "else { return params['key'] } }, 'value')", params, true)); + assertEquals(true, exec(compare + "compare(() -> { if (params['number'] == 2) { return params['number'] }" + + "else { return params['key'] } }, 2)", params, true)); + assertEquals(true, exec(compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" + + "else { return params['key'] } }, 'value')", params, true)); + assertEquals(false, exec(compare + "compare(() -> { if (params['number'] == 1) { return params['number'] }" + + "else { return params['key'] } }, 2)", params, true)); + } } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml index 902c6950245..01f3048300b 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -34,7 +34,32 @@ setup: x: "bbb" - match: { hits.hits.0.fields.bar.0: "aaabbb"} +--- +"Scripted Field Doing Compare": + - do: + search: + body: + script_fields: + bar: + script: + inline: "boolean compare(Supplier s, def v) {return s.get() 
== v;} + compare(() -> { return doc['foo'].value }, params.x);" + params: + x: "aaa" + - match: { hits.hits.0.fields.bar.0: true} + - do: + search: + body: + script_fields: + bar: + script: + inline: "boolean compare(Supplier s, def v) {return s.get() == v;} + compare(() -> { return doc['foo'].value }, params.x);" + params: + x: "bbb" + + - match: { hits.hits.0.fields.bar.0: false} --- "Scripted Field with a null safe dereference (non-null)": - do: From 23217827037264c638fda00a492a3b2e44b384e5 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 18 Nov 2016 09:51:42 +0000 Subject: [PATCH 08/50] Fix Windows test failure (#21647) --- .../bootstrap/SpawnerNoBootstrapTests.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 5e08696ebf3..d1556d02758 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -51,6 +51,30 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { + "\n" + "read SOMETHING\n"; + /** + * Simplest case: a plugin with no controller daemon. 
+ */ + public void testNoControllerSpawn() throws IOException, InterruptedException { + Path esHome = createTempDir().resolve("esHome"); + Settings.Builder settingsBuilder = Settings.builder(); + settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString()); + Settings settings = settingsBuilder.build(); + + Environment environment = new Environment(settings); + + // This plugin will NOT have a controller daemon + Path plugin = environment.pluginsFile().resolve("a_plugin"); + Files.createDirectories(plugin); + + try (Spawner spawner = new Spawner()) { + spawner.spawnNativePluginControllers(environment); + assertTrue(spawner.getProcesses().isEmpty()); + } + } + + /** + * Two plugins - one with a controller daemon and one without. + */ public void testControllerSpawn() throws IOException, InterruptedException { // On Windows you cannot directly run a batch file - you have to run cmd.exe with the batch file // as an argument and that's out of the remit of the controller daemon process spawner. If From b9bee8bca34f75be4acd075818971a3a0cf685f6 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 18 Nov 2016 11:54:22 +0100 Subject: [PATCH 09/50] Remove transport-netty3-client mention (#21650) Now netty3 is gone, this mention must also be removed. 
--- build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/build.gradle b/build.gradle index e1ab4576639..2bda78cd1c4 100644 --- a/build.gradle +++ b/build.gradle @@ -112,7 +112,6 @@ subprojects { "org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:deb', "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage', // for transport client - "org.elasticsearch.plugin:transport-netty3-client:${version}": ':modules:transport-netty3', "org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4', "org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex', "org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache', From 277f4b8d248cc523a6ce0d9ba6a834ec295056f6 Mon Sep 17 00:00:00 2001 From: Luca Wintergerst Date: Fri, 18 Nov 2016 12:05:49 +0100 Subject: [PATCH 10/50] fix two errors in suggester docs The first changed referred to an example of the 2.4 documentation. I removed the no longer relevant parts. We should consider adding a little more here. The second change was just then->than in the suggest_mode popular section --- docs/reference/search/suggesters/phrase-suggest.asciidoc | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index 6c502421e4e..3fce27c6d95 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -106,12 +106,7 @@ POST _suggest -------------------------------------------------- // CONSOLE -The response contains suggestions scored by the most likely spell -correction first. In this case we received the expected correction -`xorr the god jewel` first while the second correction is less -conservative where only one of the errors is corrected. 
Note, the -request is executed with `max_errors` set to `0.5` so 50% of the terms -can contain misspellings (See parameter descriptions below). +The response contains suggestions scored by the most likely spell correction first. In this case we received the expected correction "nobel prize". [source,js] -------------------------------------------------- @@ -320,7 +315,7 @@ The direct generators support the following parameters: filtered out using `confidence`. Three possible values can be specified: ** `missing`: Only generate suggestions for terms that are not in the shard. This is the default. - ** `popular`: Only suggest terms that occur in more docs on the shard then + ** `popular`: Only suggest terms that occur in more docs on the shard than the original term. ** `always`: Suggest any matching suggestions based on terms in the suggest text. From b1fd257c42b69f536b8fc1adf888170d5d4a934c Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Fri, 18 Nov 2016 12:30:19 +0100 Subject: [PATCH 11/50] [TEST] Fix testTimedOutUpdateTaskCleanedUp to wait for blocking task to be completed The "test" task can complete its execution with a timeout exception before the "block-task" actually starts executing. The test thus has to wait for both to be completed before checking that the updateTasksPerExecutor map has been properly cleaned up. 
--- .../org/elasticsearch/cluster/service/ClusterServiceTests.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index 026d3a16185..a5d4790f742 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -150,6 +150,7 @@ public class ClusterServiceTests extends ESTestCase { public void testTimedOutUpdateTaskCleanedUp() throws Exception { final CountDownLatch block = new CountDownLatch(1); + final CountDownLatch blockCompleted = new CountDownLatch(1); clusterService.submitStateUpdateTask("block-task", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -158,6 +159,7 @@ public class ClusterServiceTests extends ESTestCase { } catch (InterruptedException e) { throw new RuntimeException(e); } + blockCompleted.countDown(); return currentState; } @@ -187,6 +189,7 @@ public class ClusterServiceTests extends ESTestCase { }); block.countDown(); block2.await(); + blockCompleted.await(); synchronized (clusterService.updateTasksPerExecutor) { assertTrue("expected empty map but was " + clusterService.updateTasksPerExecutor, clusterService.updateTasksPerExecutor.isEmpty()); From 5ca60fa78b35b54af17804f591d4255984f5127a Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Nov 2016 06:56:23 -0500 Subject: [PATCH 12/50] Remove stale mention of minimum master nodes check This commit removes a stale mention of the minimum master nodes boostrap check from the docs. 
Relates #21653 --- docs/reference/setup/important-settings.asciidoc | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/docs/reference/setup/important-settings.asciidoc b/docs/reference/setup/important-settings.asciidoc index 63f988325a7..bdf7c314476 100644 --- a/docs/reference/setup/important-settings.asciidoc +++ b/docs/reference/setup/important-settings.asciidoc @@ -158,16 +158,15 @@ discovery.zen.ping.unicast.hosts: === `discovery.zen.minimum_master_nodes` To prevent data loss, it is vital to configure the -`discovery.zen.minimum_master_nodes setting` so that each master-eligible node +`discovery.zen.minimum_master_nodes` setting so that each master-eligible node knows the _minimum number of master-eligible nodes_ that must be visible in order to form a cluster. Without this setting, a cluster that suffers a network failure is at risk of having the cluster split into two independent clusters -- a split brain -- -which will lead to data loss. A more detailed explanation is provided +which will lead to data loss. A more detailed explanation is provided in <>. - To avoid a split brain, this setting should be set to a _quorum_ of master- eligible nodes: @@ -181,6 +180,3 @@ nodes should be set to `(3 / 2) + 1` or `2`: discovery.zen.minimum_master_nodes: 2 -------------------------------------------------- -IMPORTANT: If `discovery.zen.minimum_master_nodes` is not set when -Elasticsearch is running in <>, an exception will -be thrown which will prevent the node from starting. From 1d2a1540ccf0667e137cd1b1e201818a52f280d9 Mon Sep 17 00:00:00 2001 From: Ali Beyad Date: Fri, 18 Nov 2016 10:19:27 -0500 Subject: [PATCH 13/50] Makes allocator decision classes top-level classes (#21662) This commit moves several allocation decider related inner classes into their own top-level class, in order to use more easily in the allocation explain API. This commit also renames some of those decision related classes to more suitable names. 
This is simply a cosmetic change - no functionality changes with this commit whatsoever. To summarize the changes: 1. ShardAllocationDecision renamed to AllocateUnassignedDecision 2. RelocationDecision moved to a top-level class 3. MoveDecision moved to a top-level class 4. RebalanceDecision moved to a top-level class 5. ShardAllocationDecisionTests renamed to AllocateUnassignedDecisionTests 6. NodeRebalanceResult moved to a top-level class 7. ShardAllocationDecision#WeightedDecision moved to a top-level class and renamed to NodeAllocationResult. --- ...n.java => AllocateUnassignedDecision.java} | 133 +++----- .../routing/allocation/MoveDecision.java | 127 +++++++ .../allocation/NodeAllocationResult.java | 77 +++++ .../allocation/NodeRebalanceResult.java | 88 +++++ .../routing/allocation/RebalanceDecision.java | 95 ++++++ .../allocation/RelocationDecision.java | 74 ++++ .../allocator/BalancedShardsAllocator.java | 316 +----------------- .../gateway/BaseGatewayShardAllocator.java | 22 +- .../gateway/PrimaryShardAllocator.java | 28 +- .../gateway/ReplicaShardAllocator.java | 24 +- ...a => AllocateUnassignedDecisionTests.java} | 73 ++-- .../allocation/BalancedSingleShardTests.java | 6 +- .../routing/allocation/MoveDecisionTests.java | 8 +- 13 files changed, 597 insertions(+), 474 deletions(-) rename core/src/main/java/org/elasticsearch/cluster/routing/allocation/{ShardAllocationDecision.java => AllocateUnassignedDecision.java} (61%) create mode 100644 core/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java create mode 100644 core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java create mode 100644 core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeRebalanceResult.java create mode 100644 core/src/main/java/org/elasticsearch/cluster/routing/allocation/RebalanceDecision.java create mode 100644 core/src/main/java/org/elasticsearch/cluster/routing/allocation/RelocationDecision.java rename 
core/src/test/java/org/elasticsearch/cluster/routing/allocation/{ShardAllocationDecisionTests.java => AllocateUnassignedDecisionTests.java} (59%) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardAllocationDecision.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java similarity index 61% rename from core/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardAllocationDecision.java rename to core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java index 74fd7668a01..5a82d0dfa90 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardAllocationDecision.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java @@ -30,29 +30,29 @@ import java.util.Map; import java.util.Objects; /** - * Represents the allocation decision by an allocator for a shard. + * Represents the allocation decision by an allocator for an unassigned shard. 
*/ -public class ShardAllocationDecision { +public class AllocateUnassignedDecision { /** a constant representing a shard decision where no decision was taken */ - public static final ShardAllocationDecision DECISION_NOT_TAKEN = - new ShardAllocationDecision(null, null, null, null, null, null, null); + public static final AllocateUnassignedDecision NOT_TAKEN = + new AllocateUnassignedDecision(null, null, null, null, null, null, null); /** * a map of cached common no/throttle decisions that don't need explanations, * this helps prevent unnecessary object allocations for the non-explain API case */ - private static final Map CACHED_DECISIONS; + private static final Map CACHED_DECISIONS; static { - Map cachedDecisions = new HashMap<>(); + Map cachedDecisions = new HashMap<>(); cachedDecisions.put(AllocationStatus.FETCHING_SHARD_DATA, - new ShardAllocationDecision(Type.NO, AllocationStatus.FETCHING_SHARD_DATA, null, null, null, null, null)); + new AllocateUnassignedDecision(Type.NO, AllocationStatus.FETCHING_SHARD_DATA, null, null, null, null, null)); cachedDecisions.put(AllocationStatus.NO_VALID_SHARD_COPY, - new ShardAllocationDecision(Type.NO, AllocationStatus.NO_VALID_SHARD_COPY, null, null, null, null, null)); + new AllocateUnassignedDecision(Type.NO, AllocationStatus.NO_VALID_SHARD_COPY, null, null, null, null, null)); cachedDecisions.put(AllocationStatus.DECIDERS_NO, - new ShardAllocationDecision(Type.NO, AllocationStatus.DECIDERS_NO, null, null, null, null, null)); + new AllocateUnassignedDecision(Type.NO, AllocationStatus.DECIDERS_NO, null, null, null, null, null)); cachedDecisions.put(AllocationStatus.DECIDERS_THROTTLED, - new ShardAllocationDecision(Type.THROTTLE, AllocationStatus.DECIDERS_THROTTLED, null, null, null, null, null)); + new AllocateUnassignedDecision(Type.THROTTLE, AllocationStatus.DECIDERS_THROTTLED, null, null, null, null, null)); cachedDecisions.put(AllocationStatus.DELAYED_ALLOCATION, - new ShardAllocationDecision(Type.NO, 
AllocationStatus.DELAYED_ALLOCATION, null, null, null, null, null)); + new AllocateUnassignedDecision(Type.NO, AllocationStatus.DELAYED_ALLOCATION, null, null, null, null, null)); CACHED_DECISIONS = Collections.unmodifiableMap(cachedDecisions); } @@ -67,17 +67,17 @@ public class ShardAllocationDecision { @Nullable private final String allocationId; @Nullable - private final Map nodeDecisions; + private final Map nodeDecisions; @Nullable private final Decision shardDecision; - private ShardAllocationDecision(Type finalDecision, - AllocationStatus allocationStatus, - String finalExplanation, - String assignedNodeId, - String allocationId, - Map nodeDecisions, - Decision shardDecision) { + private AllocateUnassignedDecision(Type finalDecision, + AllocationStatus allocationStatus, + String finalExplanation, + String assignedNodeId, + String allocationId, + Map nodeDecisions, + Decision shardDecision) { assert assignedNodeId != null || finalDecision == null || finalDecision != Type.YES : "a yes decision must have a node to assign the shard to"; assert allocationStatus != null || finalDecision == null || finalDecision == Type.YES : @@ -96,9 +96,9 @@ public class ShardAllocationDecision { /** * Returns a NO decision with the given shard-level decision and explanation (if in explain mode). 
*/ - public static ShardAllocationDecision no(Decision shardDecision, @Nullable String explanation) { + public static AllocateUnassignedDecision no(Decision shardDecision, @Nullable String explanation) { if (explanation != null) { - return new ShardAllocationDecision(Type.NO, AllocationStatus.DECIDERS_NO, explanation, null, null, null, shardDecision); + return new AllocateUnassignedDecision(Type.NO, AllocationStatus.DECIDERS_NO, explanation, null, null, null, shardDecision); } else { return getCachedDecision(AllocationStatus.DECIDERS_NO); } @@ -107,7 +107,7 @@ public class ShardAllocationDecision { /** * Returns a NO decision with the given {@link AllocationStatus} and explanation for the NO decision, if in explain mode. */ - public static ShardAllocationDecision no(AllocationStatus allocationStatus, @Nullable String explanation) { + public static AllocateUnassignedDecision no(AllocationStatus allocationStatus, @Nullable String explanation) { return no(allocationStatus, explanation, null); } @@ -115,11 +115,11 @@ public class ShardAllocationDecision { * Returns a NO decision with the given {@link AllocationStatus}, and the explanation for the NO decision * as well as the individual node-level decisions that comprised the final NO decision if in explain mode. 
*/ - public static ShardAllocationDecision no(AllocationStatus allocationStatus, @Nullable String explanation, - @Nullable Map nodeDecisions) { + public static AllocateUnassignedDecision no(AllocationStatus allocationStatus, @Nullable String explanation, + @Nullable Map nodeDecisions) { Objects.requireNonNull(allocationStatus, "allocationStatus must not be null"); if (explanation != null) { - return new ShardAllocationDecision(Type.NO, allocationStatus, explanation, null, null, asExplanations(nodeDecisions), null); + return new AllocateUnassignedDecision(Type.NO, allocationStatus, explanation, null, null, asExplanations(nodeDecisions), null); } else { return getCachedDecision(allocationStatus); } @@ -129,9 +129,9 @@ public class ShardAllocationDecision { * Returns a THROTTLE decision, with the given explanation and individual node-level decisions that * comprised the final THROTTLE decision if in explain mode. */ - public static ShardAllocationDecision throttle(@Nullable String explanation, @Nullable Map nodeDecisions) { + public static AllocateUnassignedDecision throttle(@Nullable String explanation, @Nullable Map nodeDecisions) { if (explanation != null) { - return new ShardAllocationDecision(Type.THROTTLE, AllocationStatus.DECIDERS_THROTTLED, explanation, null, null, + return new AllocateUnassignedDecision(Type.THROTTLE, AllocationStatus.DECIDERS_THROTTLED, explanation, null, null, asExplanations(nodeDecisions), null); } else { return getCachedDecision(AllocationStatus.DECIDERS_THROTTLED); @@ -143,17 +143,18 @@ public class ShardAllocationDecision { * comprised the final YES decision, along with the node id to which the shard is assigned and * the allocation id for the shard, if available. 
*/ - public static ShardAllocationDecision yes(String assignedNodeId, @Nullable String explanation, @Nullable String allocationId, - @Nullable Map nodeDecisions) { + public static AllocateUnassignedDecision yes(String assignedNodeId, @Nullable String explanation, @Nullable String allocationId, + @Nullable Map nodeDecisions) { Objects.requireNonNull(assignedNodeId, "assignedNodeId must not be null"); - return new ShardAllocationDecision(Type.YES, null, explanation, assignedNodeId, allocationId, asExplanations(nodeDecisions), null); + return new AllocateUnassignedDecision(Type.YES, null, explanation, assignedNodeId, allocationId, + asExplanations(nodeDecisions), null); } /** - * Creates a {@link ShardAllocationDecision} from the given {@link Decision} and the assigned node, if any. + * Creates a {@link AllocateUnassignedDecision} from the given {@link Decision} and the assigned node, if any. */ - public static ShardAllocationDecision fromDecision(Decision decision, @Nullable String assignedNodeId, boolean explain, - @Nullable Map nodeDecisions) { + public static AllocateUnassignedDecision fromDecision(Decision decision, @Nullable String assignedNodeId, boolean explain, + @Nullable Map nodeDecisions) { final Type decisionType = decision.type(); AllocationStatus allocationStatus = decisionType != Type.YES ? 
AllocationStatus.fromDecision(decisionType) : null; String explanation = null; @@ -168,19 +169,19 @@ public class ShardAllocationDecision { explanation = "shard cannot be assigned to any node in the cluster"; } } - return new ShardAllocationDecision(decisionType, allocationStatus, explanation, assignedNodeId, null, nodeDecisions, null); + return new AllocateUnassignedDecision(decisionType, allocationStatus, explanation, assignedNodeId, null, nodeDecisions, null); } - private static ShardAllocationDecision getCachedDecision(AllocationStatus allocationStatus) { - ShardAllocationDecision decision = CACHED_DECISIONS.get(allocationStatus); + private static AllocateUnassignedDecision getCachedDecision(AllocationStatus allocationStatus) { + AllocateUnassignedDecision decision = CACHED_DECISIONS.get(allocationStatus); return Objects.requireNonNull(decision, "precomputed decision not found for " + allocationStatus); } - private static Map asExplanations(Map decisionMap) { + private static Map asExplanations(Map decisionMap) { if (decisionMap != null) { - Map explanationMap = new HashMap<>(); + Map explanationMap = new HashMap<>(); for (Map.Entry entry : decisionMap.entrySet()) { - explanationMap.put(entry.getKey(), new WeightedDecision(entry.getValue(), Float.POSITIVE_INFINITY)); + explanationMap.put(entry.getKey(), new NodeAllocationResult(entry.getValue(), Float.POSITIVE_INFINITY)); } return explanationMap; } @@ -259,7 +260,7 @@ public class ShardAllocationDecision { * as the decision for the given node. */ @Nullable - public Map getNodeDecisions() { + public Map getNodeDecisions() { return nodeDecisions; } @@ -273,56 +274,4 @@ public class ShardAllocationDecision { return shardDecision; } - /** - * This class represents the shard allocation decision for a single node, - * including the {@link Decision} whether to allocate to the node and the - * weight assigned to the node for the shard in question. 
- */ - public static final class WeightedDecision { - - private final Decision decision; - private final float weight; - - public WeightedDecision(Decision decision) { - this.decision = Objects.requireNonNull(decision); - this.weight = Float.POSITIVE_INFINITY; - } - - public WeightedDecision(Decision decision, float weight) { - this.decision = Objects.requireNonNull(decision); - this.weight = Objects.requireNonNull(weight); - } - - /** - * The decision for allocating to the node. - */ - public Decision getDecision() { - return decision; - } - - /** - * The calculated weight for allocating a shard to the node. A value of {@link Float#POSITIVE_INFINITY} - * means the weight was not calculated or factored into the decision. - */ - public float getWeight() { - return weight; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - WeightedDecision that = (WeightedDecision) other; - return decision.equals(that.decision) && Float.compare(weight, that.weight) == 0; - } - - @Override - public int hashCode() { - return Objects.hash(decision, weight); - } - } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java new file mode 100644 index 00000000000..77e5bacf390 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Nullable; + +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +/** + * Represents a decision to move a started shard because it is no longer allowed to remain on its current node. + */ +public final class MoveDecision extends RelocationDecision { + /** a constant representing no decision taken */ + public static final MoveDecision NOT_TAKEN = new MoveDecision(null, null, null, null, null); + /** cached decisions so we don't have to recreate objects for common decisions when not in explain mode. */ + private static final MoveDecision CACHED_STAY_DECISION = new MoveDecision(Decision.YES, Decision.Type.NO, null, null, null); + private static final MoveDecision CACHED_CANNOT_MOVE_DECISION = new MoveDecision(Decision.NO, Decision.Type.NO, null, null, null); + + @Nullable + private final Decision canRemainDecision; + @Nullable + private final Map nodeDecisions; + + private MoveDecision(Decision canRemainDecision, Decision.Type finalDecision, String finalExplanation, + String assignedNodeId, Map nodeDecisions) { + super(finalDecision, finalExplanation, assignedNodeId); + this.canRemainDecision = canRemainDecision; + this.nodeDecisions = nodeDecisions != null ? Collections.unmodifiableMap(nodeDecisions) : null; + } + + /** + * Creates a move decision for the shard being able to remain on its current node, so not moving. 
+ */ + public static MoveDecision stay(Decision canRemainDecision, boolean explain) { + assert canRemainDecision.type() != Decision.Type.NO; + if (explain) { + final String explanation; + if (explain) { + explanation = "shard is allowed to remain on its current node, so no reason to move"; + } else { + explanation = null; + } + return new MoveDecision(Objects.requireNonNull(canRemainDecision), Decision.Type.NO, explanation, null, null); + } else { + return CACHED_STAY_DECISION; + } + } + + /** + * Creates a move decision for the shard not being able to remain on its current node. + * + * @param canRemainDecision the decision for whether the shard is allowed to remain on its current node + * @param finalDecision the decision of whether to move the shard to another node + * @param explain true if in explain mode + * @param currentNodeId the current node id where the shard is assigned + * @param assignedNodeId the node id for where the shard can move to + * @param nodeDecisions the node-level decisions that comprised the final decision, non-null iff explain is true + * @return the {@link MoveDecision} for moving the shard to another node + */ + public static MoveDecision decision(Decision canRemainDecision, Decision.Type finalDecision, boolean explain, String currentNodeId, + String assignedNodeId, Map nodeDecisions) { + assert canRemainDecision != null; + assert canRemainDecision.type() != Decision.Type.YES : "create decision with MoveDecision#stay instead"; + String finalExplanation = null; + if (explain) { + assert currentNodeId != null; + if (finalDecision == Decision.Type.YES) { + assert assignedNodeId != null; + finalExplanation = "shard cannot remain on node [" + currentNodeId + "], moving to node [" + assignedNodeId + "]"; + } else if (finalDecision == Decision.Type.THROTTLE) { + finalExplanation = "shard cannot remain on node [" + currentNodeId + "], throttled on moving to another node"; + } else { + finalExplanation = "shard cannot remain on node [" + 
currentNodeId + "], but cannot be assigned to any other node"; + } + } + if (finalExplanation == null && finalDecision == Decision.Type.NO) { + // the final decision is NO (no node to move the shard to) and we are not in explain mode, return a cached version + return CACHED_CANNOT_MOVE_DECISION; + } else { + assert ((assignedNodeId == null) == (finalDecision != Decision.Type.YES)); + return new MoveDecision(canRemainDecision, finalDecision, finalExplanation, assignedNodeId, nodeDecisions); + } + } + + /** + * Returns {@code true} if the shard cannot remain on its current node and can be moved, returns {@code false} otherwise. + */ + public boolean move() { + return cannotRemain() && getFinalDecisionType() == Decision.Type.YES; + } + + /** + * Returns {@code true} if the shard cannot remain on its current node. + */ + public boolean cannotRemain() { + return isDecisionTaken() && canRemainDecision.type() == Decision.Type.NO; + } + + /** + * Gets the individual node-level decisions that went into making the final decision as represented by + * {@link #getFinalDecisionType()}. The map that is returned has the node id as the key and a {@link NodeAllocationResult}. + */ + @Nullable + public Map getNodeDecisions() { + return nodeDecisions; + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java new file mode 100644 index 00000000000..3e753ee0866 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.cluster.routing.allocation.decider.Decision; + +import java.util.Objects; + +/** + * This class represents the shard allocation decision for a single node, + * including the {@link Decision} whether to allocate to the node and other + * information related to obtaining the decision for the node. + */ +public final class NodeAllocationResult { + + private final Decision decision; + private final float weight; + + public NodeAllocationResult(Decision decision) { + this.decision = Objects.requireNonNull(decision); + this.weight = Float.POSITIVE_INFINITY; + } + + public NodeAllocationResult(Decision decision, float weight) { + this.decision = Objects.requireNonNull(decision); + this.weight = Objects.requireNonNull(weight); + } + + /** + * The decision for allocating to the node. + */ + public Decision getDecision() { + return decision; + } + + /** + * The calculated weight for allocating a shard to the node. A value of {@link Float#POSITIVE_INFINITY} + * means the weight was not calculated or factored into the decision. 
+ */ + public float getWeight() { + return weight; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + NodeAllocationResult that = (NodeAllocationResult) other; + return decision.equals(that.decision) && Float.compare(weight, that.weight) == 0; + } + + @Override + public int hashCode() { + return Objects.hash(decision, weight); + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeRebalanceResult.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeRebalanceResult.java new file mode 100644 index 00000000000..1cb5d70f046 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeRebalanceResult.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.cluster.routing.allocation.decider.Decision; + +import java.util.Objects; + +/** + * A node-level explanation for the decision to rebalance a shard. 
+ */ +public final class NodeRebalanceResult { + private final Decision.Type nodeDecisionType; + private final Decision canAllocate; + private final boolean betterWeightThanCurrent; + private final boolean deltaAboveThreshold; + private final float currentWeight; + private final float weightWithShardAdded; + + public NodeRebalanceResult(Decision.Type nodeDecisionType, Decision canAllocate, boolean betterWeightThanCurrent, + boolean deltaAboveThreshold, float currentWeight, float weightWithShardAdded) { + this.nodeDecisionType = Objects.requireNonNull(nodeDecisionType); + this.canAllocate = Objects.requireNonNull(canAllocate); + this.betterWeightThanCurrent = betterWeightThanCurrent; + this.deltaAboveThreshold = deltaAboveThreshold; + this.currentWeight = currentWeight; + this.weightWithShardAdded = weightWithShardAdded; + } + + /** + * Returns the decision to rebalance to the node. + */ + public Decision.Type getNodeDecisionType() { + return nodeDecisionType; + } + + /** + * Returns whether the shard is allowed to be allocated to the node. + */ + public Decision getCanAllocateDecision() { + return canAllocate; + } + + /** + * Returns whether the weight of the node is better than the weight of the node where the shard currently resides. + */ + public boolean isBetterWeightThanCurrent() { + return betterWeightThanCurrent; + } + + /** + * Returns if the weight delta by assigning to this node was above the threshold to warrant a rebalance. + */ + public boolean isDeltaAboveThreshold() { + return deltaAboveThreshold; + } + + /** + * Returns the current weight of the node if the shard is not added to the node. + */ + public float getCurrentWeight() { + return currentWeight; + } + + /** + * Returns the weight of the node if the shard is added to the node. 
+ */ + public float getWeightWithShardAdded() { + return weightWithShardAdded; + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RebalanceDecision.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RebalanceDecision.java new file mode 100644 index 00000000000..1987cbefa28 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RebalanceDecision.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; +import org.elasticsearch.common.Nullable; + +import java.util.Collections; +import java.util.Map; + +/** + * Represents a decision to move a started shard to form a more optimally balanced cluster. 
+ */ +public final class RebalanceDecision extends RelocationDecision { + /** a constant representing no decision taken */ + public static final RebalanceDecision NOT_TAKEN = new RebalanceDecision(null, null, null, null, null, Float.POSITIVE_INFINITY); + + @Nullable + private final Decision canRebalanceDecision; + @Nullable + private final Map nodeDecisions; + private float currentWeight; + + public RebalanceDecision(Decision canRebalanceDecision, Type finalDecision, String finalExplanation) { + this(canRebalanceDecision, finalDecision, finalExplanation, null, null, Float.POSITIVE_INFINITY); + } + + public RebalanceDecision(Decision canRebalanceDecision, Type finalDecision, String finalExplanation, + String assignedNodeId, Map nodeDecisions, float currentWeight) { + super(finalDecision, finalExplanation, assignedNodeId); + this.canRebalanceDecision = canRebalanceDecision; + this.nodeDecisions = nodeDecisions != null ? Collections.unmodifiableMap(nodeDecisions) : null; + this.currentWeight = currentWeight; + } + + /** + * Creates a new {@link RebalanceDecision}, computing the explanation based on the decision parameters. + */ + public static RebalanceDecision decision(Decision canRebalanceDecision, Type finalDecision, String assignedNodeId, + Map nodeDecisions, float currentWeight, float threshold) { + final String explanation = produceFinalExplanation(finalDecision, assignedNodeId, threshold); + return new RebalanceDecision(canRebalanceDecision, finalDecision, explanation, assignedNodeId, nodeDecisions, currentWeight); + } + + /** + * Returns the decision for being allowed to rebalance the shard. + */ + @Nullable + public Decision getCanRebalanceDecision() { + return canRebalanceDecision; + } + + /** + * Gets the individual node-level decisions that went into making the final decision as represented by + * {@link #getFinalDecisionType()}. The map that is returned has the node id as the key and a {@link NodeRebalanceResult}. 
+ */ + @Nullable + public Map getNodeDecisions() { + return nodeDecisions; + } + + private static String produceFinalExplanation(final Type finalDecisionType, final String assignedNodeId, final float threshold) { + final String finalExplanation; + if (assignedNodeId != null) { + if (finalDecisionType == Type.THROTTLE) { + finalExplanation = "throttle moving shard to node [" + assignedNodeId + "], as it is " + + "currently busy with other shard relocations"; + } else { + finalExplanation = "moving shard to node [" + assignedNodeId + "] to form a more balanced cluster"; + } + } else { + finalExplanation = "cannot rebalance shard, no other node exists that would form a more balanced " + + "cluster within the defined threshold [" + threshold + "]"; + } + return finalExplanation; + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RelocationDecision.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RelocationDecision.java new file mode 100644 index 00000000000..8fa8fe9d092 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RelocationDecision.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Nullable; + +/** + * Represents a decision to relocate a started shard from its current node. + */ +public abstract class RelocationDecision { + @Nullable + private final Decision.Type finalDecision; + @Nullable + private final String finalExplanation; + @Nullable + private final String assignedNodeId; + + protected RelocationDecision(Decision.Type finalDecision, String finalExplanation, String assignedNodeId) { + this.finalDecision = finalDecision; + this.finalExplanation = finalExplanation; + this.assignedNodeId = assignedNodeId; + } + + /** + * Returns {@code true} if a decision was taken by the allocator, {@code false} otherwise. + * If no decision was taken, then the rest of the fields in this object are meaningless and return {@code null}. + */ + public boolean isDecisionTaken() { + return finalDecision != null; + } + + /** + * Returns the final decision made by the allocator on whether to assign the shard, and + * {@code null} if no decision was taken. + */ + public Decision.Type getFinalDecisionType() { + return finalDecision; + } + + /** + * Returns the free-text explanation for the reason behind the decision taken in {@link #getFinalDecisionType()}. + */ + @Nullable + public String getFinalExplanation() { + return finalExplanation; + } + + /** + * Get the node id that the allocator will assign the shard to, unless {@link #getFinalDecisionType()} returns + * a value other than {@link Decision.Type#YES}, in which case this returns {@code null}. 
+ */ + @Nullable + public String getAssignedNodeId() { + return assignedNodeId; + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 3a6c1c45f01..7601f15f5d2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -30,14 +30,16 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.routing.allocation.AllocateUnassignedDecision; +import org.elasticsearch.cluster.routing.allocation.MoveDecision; +import org.elasticsearch.cluster.routing.allocation.NodeRebalanceResult; +import org.elasticsearch.cluster.routing.allocation.RebalanceDecision; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision.WeightedDecision; +import org.elasticsearch.cluster.routing.allocation.NodeAllocationResult; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -54,7 +56,6 @@ import java.util.HashSet; import 
java.util.IdentityHashMap; import java.util.Iterator; import java.util.Map; -import java.util.Objects; import java.util.Set; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; @@ -368,7 +369,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards final float currentWeight = sorter.weight(currentNode); final AllocationDeciders deciders = allocation.deciders(); final String idxName = shard.getIndexName(); - Map nodeDecisions = new HashMap<>(modelNodes.length - 1); + Map nodeDecisions = new HashMap<>(modelNodes.length - 1); Type rebalanceDecisionType = Type.NO; String assignedNodeId = null; for (ModelNode node : modelNodes) { @@ -412,7 +413,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards assignedNodeId = node.getNodeId(); } } - nodeDecisions.put(node.getNodeId(), new NodeRebalanceDecision( + nodeDecisions.put(node.getNodeId(), new NodeRebalanceResult( rebalanceConditionsMet ? canAllocate.type() : Type.NO, canAllocate, betterWeightThanCurrent, @@ -683,14 +684,14 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards */ Type bestDecision = Type.NO; RoutingNode targetNode = null; - final Map nodeExplanationMap = explain ? new HashMap<>() : null; + final Map nodeExplanationMap = explain ? new HashMap<>() : null; for (ModelNode currentNode : sorter.modelNodes) { if (currentNode != sourceNode) { RoutingNode target = currentNode.getRoutingNode(); // don't use canRebalance as we want hard filtering rules to apply. See #17698 Decision allocationDecision = allocation.deciders().canAllocate(shardRouting, target, allocation); if (explain) { - nodeExplanationMap.put(currentNode.getNodeId(), new WeightedDecision(allocationDecision, sorter.weight(currentNode))); + nodeExplanationMap.put(currentNode.getNodeId(), new NodeAllocationResult(allocationDecision, sorter.weight(currentNode))); } // TODO maybe we can respect throttling here too? 
if (allocationDecision.type().higherThan(bestDecision)) { @@ -791,7 +792,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards do { for (int i = 0; i < primaryLength; i++) { ShardRouting shard = primary[i]; - ShardAllocationDecision allocationDecision = decideAllocateUnassigned(shard, throttledNodes); + AllocateUnassignedDecision allocationDecision = decideAllocateUnassigned(shard, throttledNodes); final Type decisionType = allocationDecision.getFinalDecisionType(); final String assignedNodeId = allocationDecision.getAssignedNodeId(); final ModelNode minNode = assignedNodeId != null ? nodes.get(assignedNodeId) : null; @@ -864,16 +865,16 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards * {@link ModelNode} representing the node that the shard should be assigned to. If the decision returned * is of type {@link Type#NO}, then the assigned node will be null. */ - private ShardAllocationDecision decideAllocateUnassigned(final ShardRouting shard, final Set throttledNodes) { + private AllocateUnassignedDecision decideAllocateUnassigned(final ShardRouting shard, final Set throttledNodes) { if (shard.assignedToNode()) { // we only make decisions for unassigned shards here - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } Decision shardLevelDecision = allocation.deciders().canAllocate(shard, allocation); if (shardLevelDecision.type() == Type.NO) { // NO decision for allocating the shard, irrespective of any particular node, so exit early - return ShardAllocationDecision.no(shardLevelDecision, explain("cannot allocate shard in its current state")); + return AllocateUnassignedDecision.no(shardLevelDecision, explain("cannot allocate shard in its current state")); } /* find an node with minimal weight we can allocate on*/ @@ -884,11 +885,11 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards if (throttledNodes.size() >= 
nodes.size() && explain == false) { // all nodes are throttled, so we know we won't be able to allocate this round, // so if we are not in explain mode, short circuit - return ShardAllocationDecision.no(UnassignedInfo.AllocationStatus.DECIDERS_NO, null); + return AllocateUnassignedDecision.no(UnassignedInfo.AllocationStatus.DECIDERS_NO, null); } /* Don't iterate over an identity hashset here the * iteration order is different for each run and makes testing hard */ - Map nodeExplanationMap = explain ? new HashMap<>() : null; + Map nodeExplanationMap = explain ? new HashMap<>() : null; for (ModelNode node : nodes.values()) { if ((throttledNodes.contains(node) || node.containsShard(shard)) && explain == false) { // decision is NO without needing to check anything further, so short circuit @@ -904,7 +905,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards Decision currentDecision = allocation.deciders().canAllocate(shard, node.getRoutingNode(), allocation); if (explain) { - nodeExplanationMap.put(node.getNodeId(), new WeightedDecision(currentDecision, currentWeight)); + nodeExplanationMap.put(node.getNodeId(), new NodeAllocationResult(currentDecision, currentWeight)); } if (currentDecision.type() == Type.YES || currentDecision.type() == Type.THROTTLE) { final boolean updateMinNode; @@ -945,7 +946,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards // decision was not set and a node was not assigned, so treat it as a NO decision decision = Decision.NO; } - return ShardAllocationDecision.fromDecision( + return AllocateUnassignedDecision.fromDecision( decision, minNode != null ? minNode.getNodeId() : null, explain, @@ -1223,287 +1224,4 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } } - /** - * Represents a decision to relocate a started shard from its current node. 
- */ - public abstract static class RelocationDecision { - @Nullable - private final Type finalDecision; - @Nullable - private final String finalExplanation; - @Nullable - private final String assignedNodeId; - - protected RelocationDecision(Type finalDecision, String finalExplanation, String assignedNodeId) { - this.finalDecision = finalDecision; - this.finalExplanation = finalExplanation; - this.assignedNodeId = assignedNodeId; - } - - /** - * Returns {@code true} if a decision was taken by the allocator, {@code false} otherwise. - * If no decision was taken, then the rest of the fields in this object are meaningless and return {@code null}. - */ - public boolean isDecisionTaken() { - return finalDecision != null; - } - - /** - * Returns the final decision made by the allocator on whether to assign the shard, and - * {@code null} if no decision was taken. - */ - public Type getFinalDecisionType() { - return finalDecision; - } - - /** - * Returns the free-text explanation for the reason behind the decision taken in {@link #getFinalDecisionType()}. - */ - @Nullable - public String getFinalExplanation() { - return finalExplanation; - } - - /** - * Get the node id that the allocator will assign the shard to, unless {@link #getFinalDecisionType()} returns - * a value other than {@link Decision.Type#YES}, in which case this returns {@code null}. - */ - @Nullable - public String getAssignedNodeId() { - return assignedNodeId; - } - } - - /** - * Represents a decision to move a started shard because it is no longer allowed to remain on its current node. - */ - public static final class MoveDecision extends RelocationDecision { - /** a constant representing no decision taken */ - public static final MoveDecision NOT_TAKEN = new MoveDecision(null, null, null, null, null); - /** cached decisions so we don't have to recreate objects for common decisions when not in explain mode. 
*/ - private static final MoveDecision CACHED_STAY_DECISION = new MoveDecision(Decision.YES, Type.NO, null, null, null); - private static final MoveDecision CACHED_CANNOT_MOVE_DECISION = new MoveDecision(Decision.NO, Type.NO, null, null, null); - - @Nullable - private final Decision canRemainDecision; - @Nullable - private final Map nodeDecisions; - - private MoveDecision(Decision canRemainDecision, Type finalDecision, String finalExplanation, - String assignedNodeId, Map nodeDecisions) { - super(finalDecision, finalExplanation, assignedNodeId); - this.canRemainDecision = canRemainDecision; - this.nodeDecisions = nodeDecisions != null ? Collections.unmodifiableMap(nodeDecisions) : null; - } - - /** - * Creates a move decision for the shard being able to remain on its current node, so not moving. - */ - public static MoveDecision stay(Decision canRemainDecision, boolean explain) { - assert canRemainDecision.type() != Type.NO; - if (explain) { - final String explanation; - if (explain) { - explanation = "shard is allowed to remain on its current node, so no reason to move"; - } else { - explanation = null; - } - return new MoveDecision(Objects.requireNonNull(canRemainDecision), Type.NO, explanation, null, null); - } else { - return CACHED_STAY_DECISION; - } - } - - /** - * Creates a move decision for the shard not being able to remain on its current node. 
- * - * @param canRemainDecision the decision for whether the shard is allowed to remain on its current node - * @param finalDecision the decision of whether to move the shard to another node - * @param explain true if in explain mode - * @param currentNodeId the current node id where the shard is assigned - * @param assignedNodeId the node id for where the shard can move to - * @param nodeDecisions the node-level decisions that comprised the final decision, non-null iff explain is true - * @return the {@link MoveDecision} for moving the shard to another node - */ - public static MoveDecision decision(Decision canRemainDecision, Type finalDecision, boolean explain, String currentNodeId, - String assignedNodeId, Map nodeDecisions) { - assert canRemainDecision != null; - assert canRemainDecision.type() != Type.YES : "create decision with MoveDecision#stay instead"; - String finalExplanation = null; - if (explain) { - assert currentNodeId != null; - if (finalDecision == Type.YES) { - assert assignedNodeId != null; - finalExplanation = "shard cannot remain on node [" + currentNodeId + "], moving to node [" + assignedNodeId + "]"; - } else if (finalDecision == Type.THROTTLE) { - finalExplanation = "shard cannot remain on node [" + currentNodeId + "], throttled on moving to another node"; - } else { - finalExplanation = "shard cannot remain on node [" + currentNodeId + "], but cannot be assigned to any other node"; - } - } - if (finalExplanation == null && finalDecision == Type.NO) { - // the final decision is NO (no node to move the shard to) and we are not in explain mode, return a cached version - return CACHED_CANNOT_MOVE_DECISION; - } else { - assert ((assignedNodeId == null) == (finalDecision != Type.YES)); - return new MoveDecision(canRemainDecision, finalDecision, finalExplanation, assignedNodeId, nodeDecisions); - } - } - - /** - * Returns {@code true} if the shard cannot remain on its current node and can be moved, returns {@code false} otherwise. 
- */ - public boolean move() { - return cannotRemain() && getFinalDecisionType() == Type.YES; - } - - /** - * Returns {@code true} if the shard cannot remain on its current node. - */ - public boolean cannotRemain() { - return isDecisionTaken() && canRemainDecision.type() == Type.NO; - } - - /** - * Gets the individual node-level decisions that went into making the final decision as represented by - * {@link #getFinalDecisionType()}. The map that is returned has the node id as the key and a {@link WeightedDecision}. - */ - @Nullable - public Map getNodeDecisions() { - return nodeDecisions; - } - } - - /** - * Represents a decision to move a started shard to form a more optimally balanced cluster. - */ - public static final class RebalanceDecision extends RelocationDecision { - /** a constant representing no decision taken */ - public static final RebalanceDecision NOT_TAKEN = new RebalanceDecision(null, null, null, null, null, Float.POSITIVE_INFINITY); - - @Nullable - private final Decision canRebalanceDecision; - @Nullable - private final Map nodeDecisions; - private float currentWeight; - - protected RebalanceDecision(Decision canRebalanceDecision, Type finalDecision, String finalExplanation) { - this(canRebalanceDecision, finalDecision, finalExplanation, null, null, Float.POSITIVE_INFINITY); - } - - protected RebalanceDecision(Decision canRebalanceDecision, Type finalDecision, String finalExplanation, - String assignedNodeId, Map nodeDecisions, float currentWeight) { - super(finalDecision, finalExplanation, assignedNodeId); - this.canRebalanceDecision = canRebalanceDecision; - this.nodeDecisions = nodeDecisions != null ? Collections.unmodifiableMap(nodeDecisions) : null; - this.currentWeight = currentWeight; - } - - /** - * Creates a new {@link RebalanceDecision}, computing the explanation based on the decision parameters. 
- */ - public static RebalanceDecision decision(Decision canRebalanceDecision, Type finalDecision, String assignedNodeId, - Map nodeDecisions, float currentWeight, float threshold) { - final String explanation = produceFinalExplanation(finalDecision, assignedNodeId, threshold); - return new RebalanceDecision(canRebalanceDecision, finalDecision, explanation, assignedNodeId, nodeDecisions, currentWeight); - } - - /** - * Returns the decision for being allowed to rebalance the shard. - */ - @Nullable - public Decision getCanRebalanceDecision() { - return canRebalanceDecision; - } - - /** - * Gets the individual node-level decisions that went into making the final decision as represented by - * {@link #getFinalDecisionType()}. The map that is returned has the node id as the key and a {@link NodeRebalanceDecision}. - */ - @Nullable - public Map getNodeDecisions() { - return nodeDecisions; - } - - private static String produceFinalExplanation(final Type finalDecisionType, final String assignedNodeId, final float threshold) { - final String finalExplanation; - if (assignedNodeId != null) { - if (finalDecisionType == Type.THROTTLE) { - finalExplanation = "throttle moving shard to node [" + assignedNodeId + "], as it is " + - "currently busy with other shard relocations"; - } else { - finalExplanation = "moving shard to node [" + assignedNodeId + "] to form a more balanced cluster"; - } - } else { - finalExplanation = "cannot rebalance shard, no other node exists that would form a more balanced " + - "cluster within the defined threshold [" + threshold + "]"; - } - return finalExplanation; - } - } - - /** - * A node-level explanation for the decision to rebalance a shard. 
- */ - public static final class NodeRebalanceDecision { - private final Type nodeDecisionType; - private final Decision canAllocate; - private final boolean betterWeightThanCurrent; - private final boolean deltaAboveThreshold; - private final float currentWeight; - private final float weightWithShardAdded; - - NodeRebalanceDecision(Type nodeDecisionType, Decision canAllocate, boolean betterWeightThanCurrent, - boolean deltaAboveThreshold, float currentWeight, float weightWithShardAdded) { - this.nodeDecisionType = Objects.requireNonNull(nodeDecisionType); - this.canAllocate = Objects.requireNonNull(canAllocate); - this.betterWeightThanCurrent = betterWeightThanCurrent; - this.deltaAboveThreshold = deltaAboveThreshold; - this.currentWeight = currentWeight; - this.weightWithShardAdded = weightWithShardAdded; - } - - /** - * Returns the decision to rebalance to the node. - */ - public Type getNodeDecisionType() { - return nodeDecisionType; - } - - /** - * Returns whether the shard is allowed to be allocated to the node. - */ - public Decision getCanAllocateDecision() { - return canAllocate; - } - - /** - * Returns whether the weight of the node is better than the weight of the node where the shard currently resides. - */ - public boolean isBetterWeightThanCurrent() { - return betterWeightThanCurrent; - } - - /** - * Returns if the weight delta by assigning to this node was above the threshold to warrant a rebalance. - */ - public boolean isDeltaAboveThreshold() { - return deltaAboveThreshold; - } - - /** - * Returns the current weight of the node if the shard is not added to the node. - */ - public float getCurrentWeight() { - return currentWeight; - } - - /** - * Returns the weight of the node if the shard is added to the node. 
- */ - public float getWeightWithShardAdded() { - return weightWithShardAdded; - } - } - } diff --git a/core/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java index 461fcf1e2d2..0e225c7a7f9 100644 --- a/core/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java @@ -22,8 +22,8 @@ package org.elasticsearch.gateway; import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.AllocateUnassignedDecision; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; @@ -53,21 +53,21 @@ public abstract class BaseGatewayShardAllocator extends AbstractComponent { final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); while (unassignedIterator.hasNext()) { final ShardRouting shard = unassignedIterator.next(); - final ShardAllocationDecision shardAllocationDecision = makeAllocationDecision(shard, allocation, logger); + final AllocateUnassignedDecision allocateUnassignedDecision = makeAllocationDecision(shard, allocation, logger); - if (shardAllocationDecision.isDecisionTaken() == false) { + if (allocateUnassignedDecision.isDecisionTaken() == false) { // no decision was taken by this allocator continue; } - if (shardAllocationDecision.getFinalDecisionSafe() == Decision.Type.YES) { - unassignedIterator.initialize(shardAllocationDecision.getAssignedNodeId(), - shardAllocationDecision.getAllocationId(), + if 
(allocateUnassignedDecision.getFinalDecisionSafe() == Decision.Type.YES) { + unassignedIterator.initialize(allocateUnassignedDecision.getAssignedNodeId(), + allocateUnassignedDecision.getAllocationId(), shard.primary() ? ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE : allocation.clusterInfo().getShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE), allocation.changes()); } else { - unassignedIterator.removeAndIgnore(shardAllocationDecision.getAllocationStatus(), allocation.changes()); + unassignedIterator.removeAndIgnore(allocateUnassignedDecision.getAllocationStatus(), allocation.changes()); } } } @@ -80,9 +80,9 @@ public abstract class BaseGatewayShardAllocator extends AbstractComponent { * @param unassignedShard the unassigned shard to allocate * @param allocation the current routing state * @param logger the logger - * @return an {@link ShardAllocationDecision} with the final decision of whether to allocate and details of the decision + * @return an {@link AllocateUnassignedDecision} with the final decision of whether to allocate and details of the decision */ - public abstract ShardAllocationDecision makeAllocationDecision(ShardRouting unassignedShard, - RoutingAllocation allocation, - Logger logger); + public abstract AllocateUnassignedDecision makeAllocationDecision(ShardRouting unassignedShard, + RoutingAllocation allocation, + Logger logger); } diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 7d8e8327d39..53788f7b84d 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -31,8 +31,8 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; 
+import org.elasticsearch.cluster.routing.allocation.AllocateUnassignedDecision; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; import org.elasticsearch.common.settings.Setting; @@ -110,19 +110,19 @@ public abstract class PrimaryShardAllocator extends BaseGatewayShardAllocator { } @Override - public ShardAllocationDecision makeAllocationDecision(final ShardRouting unassignedShard, - final RoutingAllocation allocation, - final Logger logger) { + public AllocateUnassignedDecision makeAllocationDecision(final ShardRouting unassignedShard, + final RoutingAllocation allocation, + final Logger logger) { if (isResponsibleFor(unassignedShard) == false) { // this allocator is not responsible for allocating this shard - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } final boolean explain = allocation.debugDecision(); final FetchResult shardState = fetchData(unassignedShard, allocation); if (shardState.hasData() == false) { allocation.setHasPendingAsyncFetch(); - return ShardAllocationDecision.no(AllocationStatus.FETCHING_SHARD_DATA, + return AllocateUnassignedDecision.no(AllocationStatus.FETCHING_SHARD_DATA, explain ? 
"still fetching shard state from the nodes in the cluster" : null); } @@ -167,18 +167,18 @@ public abstract class PrimaryShardAllocator extends BaseGatewayShardAllocator { // let BalancedShardsAllocator take care of allocating this shard logger.debug("[{}][{}]: missing local data, will restore from [{}]", unassignedShard.index(), unassignedShard.id(), unassignedShard.recoverySource()); - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } else if (recoverOnAnyNode) { // let BalancedShardsAllocator take care of allocating this shard logger.debug("[{}][{}]: missing local data, recover from any node", unassignedShard.index(), unassignedShard.id()); - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } else { // We have a shard that was previously allocated, but we could not find a valid shard copy to allocate the primary. // We could just be waiting for the node that holds the primary to start back up, in which case the allocation for // this shard will be picked up when the node joins and we do another allocation reroute logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}]", unassignedShard.index(), unassignedShard.id(), nodeShardsResult.allocationsFound); - return ShardAllocationDecision.no(AllocationStatus.NO_VALID_SHARD_COPY, + return AllocateUnassignedDecision.no(AllocationStatus.NO_VALID_SHARD_COPY, explain ? 
"shard was previously allocated, but no valid shard copy could be found amongst the nodes in the cluster" : null); } } @@ -191,7 +191,7 @@ public abstract class PrimaryShardAllocator extends BaseGatewayShardAllocator { logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", unassignedShard.index(), unassignedShard.id(), unassignedShard, decidedNode.nodeShardState.getNode()); final String nodeId = decidedNode.nodeShardState.getNode().getId(); - return ShardAllocationDecision.yes(nodeId, + return AllocateUnassignedDecision.yes(nodeId, "the allocation deciders returned a YES decision to allocate to node [" + nodeId + "]", decidedNode.nodeShardState.allocationId(), buildNodeDecisions(nodesToAllocate, explain)); @@ -207,20 +207,20 @@ public abstract class PrimaryShardAllocator extends BaseGatewayShardAllocator { logger.debug("[{}][{}]: allocating [{}] to [{}] on forced primary allocation", unassignedShard.index(), unassignedShard.id(), unassignedShard, nodeShardState.getNode()); final String nodeId = nodeShardState.getNode().getId(); - return ShardAllocationDecision.yes(nodeId, + return AllocateUnassignedDecision.yes(nodeId, "allocating the primary shard to node [" + nodeId+ "], which has a complete copy of the shard data", nodeShardState.allocationId(), buildNodeDecisions(nodesToForceAllocate, explain)); } else if (nodesToForceAllocate.throttleNodeShards.isEmpty() == false) { logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on forced primary allocation", unassignedShard.index(), unassignedShard.id(), unassignedShard, nodesToForceAllocate.throttleNodeShards); - return ShardAllocationDecision.throttle( + return AllocateUnassignedDecision.throttle( explain ? 
"allocation throttled as all nodes to which the shard may be force allocated are busy with other recoveries" : null, buildNodeDecisions(nodesToForceAllocate, explain)); } else { logger.debug("[{}][{}]: forced primary allocation denied [{}]", unassignedShard.index(), unassignedShard.id(), unassignedShard); - return ShardAllocationDecision.no(AllocationStatus.DECIDERS_NO, + return AllocateUnassignedDecision.no(AllocationStatus.DECIDERS_NO, explain ? "all nodes that hold a valid shard copy returned a NO decision, and force allocation is not permitted" : null, buildNodeDecisions(nodesToForceAllocate, explain)); } @@ -229,7 +229,7 @@ public abstract class PrimaryShardAllocator extends BaseGatewayShardAllocator { // taking place on the node currently, ignore it for now logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", unassignedShard.index(), unassignedShard.id(), unassignedShard, nodesToAllocate.throttleNodeShards); - return ShardAllocationDecision.throttle( + return AllocateUnassignedDecision.throttle( explain ? 
"allocation throttled as all nodes to which the shard may be allocated are busy with other recoveries" : null, buildNodeDecisions(nodesToAllocate, explain)); } diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index 4c73ae067b6..89da2300185 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -31,8 +31,8 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; +import org.elasticsearch.cluster.routing.allocation.AllocateUnassignedDecision; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; @@ -139,12 +139,12 @@ public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { } @Override - public ShardAllocationDecision makeAllocationDecision(final ShardRouting unassignedShard, - final RoutingAllocation allocation, - final Logger logger) { + public AllocateUnassignedDecision makeAllocationDecision(final ShardRouting unassignedShard, + final RoutingAllocation allocation, + final Logger logger) { if (isResponsibleFor(unassignedShard) == false) { // this allocator is not responsible for deciding on this shard - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } final RoutingNodes routingNodes = allocation.routingNodes(); @@ -153,7 +153,7 @@ public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { Tuple> allocateDecision = 
canBeAllocatedToAtLeastOneNode(unassignedShard, allocation, explain); if (allocateDecision.v1().type() != Decision.Type.YES) { logger.trace("{}: ignoring allocation, can't be allocated on any node", unassignedShard); - return ShardAllocationDecision.no(UnassignedInfo.AllocationStatus.fromDecision(allocateDecision.v1().type()), + return AllocateUnassignedDecision.no(UnassignedInfo.AllocationStatus.fromDecision(allocateDecision.v1().type()), explain ? "all nodes returned a " + allocateDecision.v1().type() + " decision for allocating the replica shard" : null, allocateDecision.v2()); } @@ -162,7 +162,7 @@ public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { if (shardStores.hasData() == false) { logger.trace("{}: ignoring allocation, still fetching shard stores", unassignedShard); allocation.setHasPendingAsyncFetch(); - return ShardAllocationDecision.no(AllocationStatus.FETCHING_SHARD_DATA, + return AllocateUnassignedDecision.no(AllocationStatus.FETCHING_SHARD_DATA, explain ? 
"still fetching shard state from the nodes in the cluster" : null); } @@ -175,7 +175,7 @@ public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { // will try and recover from // Note, this is the existing behavior, as exposed in running CorruptFileTest#testNoPrimaryData logger.trace("{}: no primary shard store found or allocated, letting actual allocation figure it out", unassignedShard); - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } MatchingNodes matchingNodes = findMatchingNodes(unassignedShard, allocation, primaryStore, shardStores, explain); @@ -189,14 +189,14 @@ public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { logger.debug("[{}][{}]: throttling allocation [{}] to [{}] in order to reuse its unallocated persistent store", unassignedShard.index(), unassignedShard.id(), unassignedShard, nodeWithHighestMatch.node()); // we are throttling this, as we have enough other shards to allocate to this node, so ignore it for now - return ShardAllocationDecision.throttle( + return AllocateUnassignedDecision.throttle( explain ? 
"returned a THROTTLE decision on each node that has an existing copy of the shard, so waiting to re-use one of those copies" : null, matchingNodes.nodeDecisions); } else { logger.debug("[{}][{}]: allocating [{}] to [{}] in order to reuse its unallocated persistent store", unassignedShard.index(), unassignedShard.id(), unassignedShard, nodeWithHighestMatch.node()); // we found a match - return ShardAllocationDecision.yes(nodeWithHighestMatch.nodeId(), + return AllocateUnassignedDecision.yes(nodeWithHighestMatch.nodeId(), "allocating to node [" + nodeWithHighestMatch.nodeId() + "] in order to re-use its unallocated persistent store", null, matchingNodes.nodeDecisions); @@ -206,11 +206,11 @@ public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { // unassigned due to a node leaving, so we delay allocation of this replica to see if the // node with the shard copy will rejoin so we can re-use the copy it has logger.debug("{}: allocation of [{}] is delayed", unassignedShard.shardId(), unassignedShard); - return ShardAllocationDecision.no(AllocationStatus.DELAYED_ALLOCATION, + return AllocateUnassignedDecision.no(AllocationStatus.DELAYED_ALLOCATION, explain ? 
"not allocating this shard, no nodes contain data for the replica and allocation is delayed" : null); } - return ShardAllocationDecision.DECISION_NOT_TAKEN; + return AllocateUnassignedDecision.NOT_TAKEN; } /** diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardAllocationDecisionTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java similarity index 59% rename from core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardAllocationDecisionTests.java rename to core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java index d8e4570c04b..5648cb38117 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardAllocationDecisionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision.WeightedDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.test.ESTestCase; @@ -32,27 +31,27 @@ import java.util.Map; import java.util.stream.Collectors; /** - * Unit tests for the {@link ShardAllocationDecision} class. + * Unit tests for the {@link AllocateUnassignedDecision} class. 
*/ -public class ShardAllocationDecisionTests extends ESTestCase { +public class AllocateUnassignedDecisionTests extends ESTestCase { public void testDecisionNotTaken() { - ShardAllocationDecision shardAllocationDecision = ShardAllocationDecision.DECISION_NOT_TAKEN; - assertFalse(shardAllocationDecision.isDecisionTaken()); - assertNull(shardAllocationDecision.getFinalDecisionType()); - assertNull(shardAllocationDecision.getAllocationStatus()); - assertNull(shardAllocationDecision.getAllocationId()); - assertNull(shardAllocationDecision.getAssignedNodeId()); - assertNull(shardAllocationDecision.getFinalExplanation()); - assertNull(shardAllocationDecision.getNodeDecisions()); - expectThrows(IllegalArgumentException.class, () -> shardAllocationDecision.getFinalDecisionSafe()); + AllocateUnassignedDecision allocateUnassignedDecision = AllocateUnassignedDecision.NOT_TAKEN; + assertFalse(allocateUnassignedDecision.isDecisionTaken()); + assertNull(allocateUnassignedDecision.getFinalDecisionType()); + assertNull(allocateUnassignedDecision.getAllocationStatus()); + assertNull(allocateUnassignedDecision.getAllocationId()); + assertNull(allocateUnassignedDecision.getAssignedNodeId()); + assertNull(allocateUnassignedDecision.getFinalExplanation()); + assertNull(allocateUnassignedDecision.getNodeDecisions()); + expectThrows(IllegalArgumentException.class, () -> allocateUnassignedDecision.getFinalDecisionSafe()); } public void testNoDecision() { final AllocationStatus allocationStatus = randomFrom( AllocationStatus.DELAYED_ALLOCATION, AllocationStatus.NO_VALID_SHARD_COPY, AllocationStatus.FETCHING_SHARD_DATA ); - ShardAllocationDecision noDecision = ShardAllocationDecision.no(allocationStatus, "something is wrong"); + AllocateUnassignedDecision noDecision = AllocateUnassignedDecision.no(allocationStatus, "something is wrong"); assertTrue(noDecision.isDecisionTaken()); assertEquals(Decision.Type.NO, noDecision.getFinalDecisionType()); assertEquals(allocationStatus, 
noDecision.getAllocationStatus()); @@ -61,10 +60,10 @@ public class ShardAllocationDecisionTests extends ESTestCase { assertNull(noDecision.getAssignedNodeId()); assertNull(noDecision.getAllocationId()); - Map nodeDecisions = new HashMap<>(); - nodeDecisions.put("node1", new ShardAllocationDecision.WeightedDecision(Decision.NO)); - nodeDecisions.put("node2", new ShardAllocationDecision.WeightedDecision(Decision.NO)); - noDecision = ShardAllocationDecision.no(AllocationStatus.DECIDERS_NO, "something is wrong", + Map nodeDecisions = new HashMap<>(); + nodeDecisions.put("node1", new NodeAllocationResult(Decision.NO)); + nodeDecisions.put("node2", new NodeAllocationResult(Decision.NO)); + noDecision = AllocateUnassignedDecision.no(AllocationStatus.DECIDERS_NO, "something is wrong", nodeDecisions.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getDecision())) ); assertTrue(noDecision.isDecisionTaken()); @@ -76,14 +75,14 @@ public class ShardAllocationDecisionTests extends ESTestCase { assertNull(noDecision.getAllocationId()); // test bad values - expectThrows(NullPointerException.class, () -> ShardAllocationDecision.no((AllocationStatus)null, "a")); + expectThrows(NullPointerException.class, () -> AllocateUnassignedDecision.no((AllocationStatus)null, "a")); } public void testThrottleDecision() { - Map nodeDecisions = new HashMap<>(); - nodeDecisions.put("node1", new ShardAllocationDecision.WeightedDecision(Decision.NO)); - nodeDecisions.put("node2", new ShardAllocationDecision.WeightedDecision(Decision.THROTTLE)); - ShardAllocationDecision throttleDecision = ShardAllocationDecision.throttle("too much happening", + Map nodeDecisions = new HashMap<>(); + nodeDecisions.put("node1", new NodeAllocationResult(Decision.NO)); + nodeDecisions.put("node2", new NodeAllocationResult(Decision.THROTTLE)); + AllocateUnassignedDecision throttleDecision = AllocateUnassignedDecision.throttle("too much happening", 
nodeDecisions.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getDecision())) ); assertTrue(throttleDecision.isDecisionTaken()); @@ -96,11 +95,11 @@ public class ShardAllocationDecisionTests extends ESTestCase { } public void testYesDecision() { - Map nodeDecisions = new HashMap<>(); - nodeDecisions.put("node1", new ShardAllocationDecision.WeightedDecision(Decision.YES)); - nodeDecisions.put("node2", new ShardAllocationDecision.WeightedDecision(Decision.NO)); + Map nodeDecisions = new HashMap<>(); + nodeDecisions.put("node1", new NodeAllocationResult(Decision.YES)); + nodeDecisions.put("node2", new NodeAllocationResult(Decision.NO)); String allocId = randomBoolean() ? "allocId" : null; - ShardAllocationDecision yesDecision = ShardAllocationDecision.yes( + AllocateUnassignedDecision yesDecision = AllocateUnassignedDecision.yes( "node1", "node was very kind", allocId, nodeDecisions.entrySet().stream().collect( Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getDecision()) ) @@ -119,27 +118,27 @@ public class ShardAllocationDecisionTests extends ESTestCase { AllocationStatus.NO_VALID_SHARD_COPY, AllocationStatus.FETCHING_SHARD_DATA, AllocationStatus.DELAYED_ALLOCATION); for (AllocationStatus allocationStatus : cachableStatuses) { if (allocationStatus == AllocationStatus.DECIDERS_THROTTLED) { - ShardAllocationDecision cached = ShardAllocationDecision.throttle(null, null); - ShardAllocationDecision another = ShardAllocationDecision.throttle(null, null); + AllocateUnassignedDecision cached = AllocateUnassignedDecision.throttle(null, null); + AllocateUnassignedDecision another = AllocateUnassignedDecision.throttle(null, null); assertSame(cached, another); - ShardAllocationDecision notCached = ShardAllocationDecision.throttle("abc", null); - another = ShardAllocationDecision.throttle("abc", null); + AllocateUnassignedDecision notCached = AllocateUnassignedDecision.throttle("abc", null); + another = 
AllocateUnassignedDecision.throttle("abc", null); assertNotSame(notCached, another); } else { - ShardAllocationDecision cached = ShardAllocationDecision.no(allocationStatus, null); - ShardAllocationDecision another = ShardAllocationDecision.no(allocationStatus, null); + AllocateUnassignedDecision cached = AllocateUnassignedDecision.no(allocationStatus, null); + AllocateUnassignedDecision another = AllocateUnassignedDecision.no(allocationStatus, null); assertSame(cached, another); - ShardAllocationDecision notCached = ShardAllocationDecision.no(allocationStatus, "abc"); - another = ShardAllocationDecision.no(allocationStatus, "abc"); + AllocateUnassignedDecision notCached = AllocateUnassignedDecision.no(allocationStatus, "abc"); + another = AllocateUnassignedDecision.no(allocationStatus, "abc"); assertNotSame(notCached, another); } } // yes decisions are not precomputed and cached Map dummyMap = Collections.emptyMap(); - ShardAllocationDecision first = ShardAllocationDecision.yes("node1", "abc", "alloc1", dummyMap); - ShardAllocationDecision second = ShardAllocationDecision.yes("node1", "abc", "alloc1", dummyMap); - // same fields for the ShardAllocationDecision, but should be different instances + AllocateUnassignedDecision first = AllocateUnassignedDecision.yes("node1", "abc", "alloc1", dummyMap); + AllocateUnassignedDecision second = AllocateUnassignedDecision.yes("node1", "abc", "alloc1", dummyMap); + // same fields for the AllocateUnassignedDecision, but should be different instances assertNotSame(first, second); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java index 1ae1620520e..d06fa3dc330 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java @@ -31,8 
+31,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.Balancer; -import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.NodeRebalanceDecision; -import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.RebalanceDecision; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; @@ -222,8 +220,8 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { assertEquals(shardToRebalance.relocatingNodeId(), rebalanceDecision.getAssignedNodeId()); // make sure all excluded nodes returned a NO decision for (String exludedNode : excludeNodes) { - NodeRebalanceDecision nodeRebalanceDecision = rebalanceDecision.getNodeDecisions().get(exludedNode); - assertEquals(Type.NO, nodeRebalanceDecision.getCanAllocateDecision().type()); + NodeRebalanceResult nodeRebalanceResult = rebalanceDecision.getNodeDecisions().get(exludedNode); + assertEquals(Type.NO, nodeRebalanceResult.getCanAllocateDecision().type()); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MoveDecisionTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MoveDecisionTests.java index 783fe690365..797976a7125 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MoveDecisionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MoveDecisionTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision.WeightedDecision; -import 
org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator.MoveDecision; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; import org.elasticsearch.test.ESTestCase; @@ -79,9 +77,9 @@ public class MoveDecisionTests extends ESTestCase { } public void testDecisionWithExplain() { - Map nodeDecisions = new HashMap<>(); - nodeDecisions.put("node1", new WeightedDecision(randomFrom(Decision.NO, Decision.THROTTLE, Decision.YES), randomFloat())); - nodeDecisions.put("node2", new WeightedDecision(randomFrom(Decision.NO, Decision.THROTTLE, Decision.YES), randomFloat())); + Map nodeDecisions = new HashMap<>(); + nodeDecisions.put("node1", new NodeAllocationResult(randomFrom(Decision.NO, Decision.THROTTLE, Decision.YES), randomFloat())); + nodeDecisions.put("node2", new NodeAllocationResult(randomFrom(Decision.NO, Decision.THROTTLE, Decision.YES), randomFloat())); MoveDecision decision = MoveDecision.decision(Decision.NO, Type.NO, true, "node1", null, nodeDecisions); assertNotNull(decision.getFinalDecisionType()); assertNotNull(decision.getFinalExplanation()); From 99f8c21d9aa9b0400e9838d4221e86c5d96104aa Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 18 Nov 2016 16:40:18 +0100 Subject: [PATCH 14/50] Don't reset non-dynamic settings unless explicitly requested (#21646) AbstractScopedSettings has the ability to only apply updates/deletes to dynamic settings. The flag is currently not respected when a setting is reset/deleted which causes static node settings to be reset if a non-dynamic key is reset via `null` value. 
Closes #21593 --- .../settings/AbstractScopedSettings.java | 23 +++++--- .../common/settings/ScopedSettingsTests.java | 53 +++++++++++++++++++ 2 files changed, 70 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index e72f274fd62..b993cef9290 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -40,6 +40,7 @@ import java.util.TreeMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -445,10 +446,18 @@ public abstract class AbstractScopedSettings extends AbstractComponent { boolean changed = false; final Set toRemove = new HashSet<>(); Settings.Builder settingsBuilder = Settings.builder(); + final Predicate canUpdate = (key) -> (onlyDynamic == false && get(key) != null) || hasDynamicSetting(key); + final Predicate canRemove = (key) ->( // we can delete if + onlyDynamic && hasDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings + || get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived + || (onlyDynamic == false && get(key) != null)); // if it's not dynamic AND we have a key for (Map.Entry entry : toApply.getAsMap().entrySet()) { - if (entry.getValue() == null) { + if (entry.getValue() == null && (canRemove.test(entry.getKey()) || entry.getKey().endsWith("*"))) { + // this either accepts null values that suffice the canUpdate test OR wildcard expressions (key ends with *) + // we don't validate if there is any dynamic setting with that prefix yet we could do in the future toRemove.add(entry.getKey()); - } else if 
((onlyDynamic == false && get(entry.getKey()) != null) || hasDynamicSetting(entry.getKey())) { + // we don't set changed here it's set after we apply deletes below if something actually changed + } else if (entry.getValue() != null && canUpdate.test(entry.getKey())) { validate(entry.getKey(), toApply); settingsBuilder.put(entry.getKey(), entry.getValue()); updates.put(entry.getKey(), entry.getValue()); @@ -456,20 +465,22 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } else { throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); } - } - changed |= applyDeletes(toRemove, target); + changed |= applyDeletes(toRemove, target, canRemove); target.put(settingsBuilder.build()); return changed; } - private static boolean applyDeletes(Set deletes, Settings.Builder builder) { + private static boolean applyDeletes(Set deletes, Settings.Builder builder, Predicate canRemove) { boolean changed = false; for (String entry : deletes) { Set keysToRemove = new HashSet<>(); Set keySet = builder.internalMap().keySet(); for (String key : keySet) { - if (Regex.simpleMatch(entry, key)) { + if (Regex.simpleMatch(entry, key) && canRemove.test(key)) { + // we have to re-check with canRemove here since we might have a wildcard expression foo.* that matches + // dynamic as well as static settings if that is the case we might remove static settings since we resolve the + // wildcards late keysToRemove.add(key); } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 635c00c1860..99126dcccd4 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -45,6 +45,59 @@ import static org.hamcrest.CoreMatchers.equalTo; public class ScopedSettingsTests extends ESTestCase { + public 
void testResetSetting() { + Setting dynamicSetting = Setting.intSetting("some.dyn.setting", 1, Property.Dynamic, Property.NodeScope); + Setting staticSetting = Setting.intSetting("some.static.setting", 1, Property.NodeScope); + Settings currentSettings = Settings.builder().put("some.dyn.setting", 5).put("some.static.setting", 6).put("archived.foo.bar", 9) + .build(); + ClusterSettings service = new ClusterSettings(currentSettings + , new HashSet<>(Arrays.asList(dynamicSetting, staticSetting))); + + expectThrows(IllegalArgumentException.class, () -> + service.updateDynamicSettings(Settings.builder().put("some.dyn.setting", 8).putNull("some.static.setting").build(), + Settings.builder().put(currentSettings), Settings.builder(), "node")); + + Settings.Builder target = Settings.builder().put(currentSettings); + Settings.Builder update = Settings.builder(); + assertTrue(service.updateDynamicSettings(Settings.builder().put("some.dyn.setting", 8).build(), + target, update, "node")); + assertEquals(8, dynamicSetting.get(target.build()).intValue()); + assertEquals(6, staticSetting.get(target.build()).intValue()); + assertEquals(9, target.build().getAsInt("archived.foo.bar", null).intValue()); + + target = Settings.builder().put(currentSettings); + update = Settings.builder(); + assertTrue(service.updateDynamicSettings(Settings.builder().putNull("some.dyn.setting").build(), + target, update, "node")); + assertEquals(1, dynamicSetting.get(target.build()).intValue()); + assertEquals(6, staticSetting.get(target.build()).intValue()); + assertEquals(9, target.build().getAsInt("archived.foo.bar", null).intValue()); + + target = Settings.builder().put(currentSettings); + update = Settings.builder(); + assertTrue(service.updateDynamicSettings(Settings.builder().putNull("archived.foo.bar").build(), + target, update, "node")); + assertEquals(5, dynamicSetting.get(target.build()).intValue()); + assertEquals(6, staticSetting.get(target.build()).intValue()); + 
assertNull(target.build().getAsInt("archived.foo.bar", null)); + + target = Settings.builder().put(currentSettings); + update = Settings.builder(); + assertTrue(service.updateDynamicSettings(Settings.builder().putNull("some.*").build(), + target, update, "node")); + assertEquals(1, dynamicSetting.get(target.build()).intValue()); + assertEquals(6, staticSetting.get(target.build()).intValue()); + assertEquals(9, target.build().getAsInt("archived.foo.bar", null).intValue()); + + target = Settings.builder().put(currentSettings); + update = Settings.builder(); + assertTrue(service.updateDynamicSettings(Settings.builder().putNull("*").build(), + target, update, "node")); + assertEquals(1, dynamicSetting.get(target.build()).intValue()); + assertEquals(6, staticSetting.get(target.build()).intValue()); + assertNull(target.build().getAsInt("archived.foo.bar", null)); + } + public void testAddConsumer() { Setting testSetting = Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope); Setting testSetting2 = Setting.intSetting("foo.bar.baz", 1, Property.Dynamic, Property.NodeScope); From 7906db83d55eb32cbb5d34d8a9f644cea2c4f024 Mon Sep 17 00:00:00 2001 From: ismael-hasan Date: Fri, 18 Nov 2016 16:52:46 +0100 Subject: [PATCH 15/50] Update BulkProcessor size in the example By default, it is recommended to start bulk with a size of 10-15MB, and increase it gradually to get the right size for the environment. The example shows originally 1GB, which can lead to some users to just copy-paste the code snippet and start with excessively big sizes. Backport of #21664 in master branch. --- docs/java-api/docs/bulk.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/java-api/docs/bulk.asciidoc b/docs/java-api/docs/bulk.asciidoc index 0b43b89c07c..288bd8415ab 100644 --- a/docs/java-api/docs/bulk.asciidoc +++ b/docs/java-api/docs/bulk.asciidoc @@ -71,7 +71,7 @@ BulkProcessor bulkProcessor = BulkProcessor.builder( Throwable failure) { ... 
} <4> }) .setBulkActions(10000) <5> - .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) <6> + .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB)) <6> .setFlushInterval(TimeValue.timeValueSeconds(5)) <7> .setConcurrentRequests(1) <8> .setBackoffPolicy( @@ -85,7 +85,7 @@ BulkProcessor bulkProcessor = BulkProcessor.builder( with `response.hasFailures()` <4> This method is called when the bulk failed and raised a `Throwable` <5> We want to execute the bulk every 10 000 requests -<6> We want to flush the bulk every 1gb +<6> We want to flush the bulk every 5mb <7> We want to flush the bulk every 5 seconds whatever the number of requests <8> Set the number of concurrent requests. A value of 0 means that only a single request will be allowed to be executed. A value of 1 means 1 concurrent request is allowed to be executed while accumulating new bulk requests. From 52d4cd504a46c931e7f7baf99ac8aa9a233bf3fd Mon Sep 17 00:00:00 2001 From: Sakthipriyan Vairamani Date: Fri, 18 Nov 2016 21:25:04 +0530 Subject: [PATCH 16/50] minor grammatical improvements (#21640) --- docs/reference/indices/templates.asciidoc | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/docs/reference/indices/templates.asciidoc b/docs/reference/indices/templates.asciidoc index 96ca5dcd1c7..c5e50e18c6e 100644 --- a/docs/reference/indices/templates.asciidoc +++ b/docs/reference/indices/templates.asciidoc @@ -41,11 +41,11 @@ PUT _template/template_1 // TESTSETUP NOTE: Index templates provide C-style /* */ block comments. Comments are allowed -everywhere in the JSON document except before to the initial opening curly bracket. +everywhere in the JSON document except before the initial opening curly bracket. -Defines a template named template_1, with a template pattern of `te*`. +Defines a template named `template_1`, with a template pattern of `te*`. The settings and mappings will be applied to any index name that matches -the `te*` template. +the `te*` pattern. 
It is also possible to include aliases in an index template as follows: @@ -72,8 +72,8 @@ PUT _template/template_1 // CONSOLE // TEST[s/^/DELETE _template\/template_1\n/] -<1> the `{index}` placeholder within the alias name will be replaced with the -actual index name that the template gets applied to during index creation. +<1> the `{index}` placeholder in the alias name will be replaced with the +actual index name that the template gets applied to, during index creation. [float] [[delete]] @@ -120,7 +120,7 @@ GET /_template [float] [[indices-templates-exists]] -=== Templates exists +=== Template exists Used to check if the template exists or not. For example: @@ -131,11 +131,12 @@ HEAD _template/template_1 // CONSOLE The HTTP status code indicates if the template with the given name -exists or not. A status code `200` means it exists, a `404` it does not. +exists or not. Status code `200` means it exists and `404` means +it does not. [float] [[multiple-templates]] -=== Multiple Template Matching +=== Multiple Templates Matching Multiple index templates can potentially match an index, in this case, both the settings and mappings are merged into the final configuration @@ -177,7 +178,7 @@ PUT /_template/template_2 // TEST[s/^/DELETE _template\/template_1\n/] The above will disable storing the `_source` on all `type1` types, but -for indices of that start with `te*`, source will still be enabled. +for indices that start with `te*`, `_source` will still be enabled. Note, for mappings, the merging is "deep", meaning that specific object/property based mappings can easily be added/overridden on higher order templates, with lower order templates providing the basis. 
@@ -206,7 +207,7 @@ PUT /_template/template_1 -------------------------------------------------- // CONSOLE -To check for the `version`, you can +To check the `version`, you can <> using `filter_path` to limit the response to just the `version`: From 484ad31ed90d4c9f36201a44867044a3673adb32 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 18 Nov 2016 13:04:28 -0500 Subject: [PATCH 17/50] Clarify that plugins can be closed Plugins are closed if they implement java.io.Closeable but this is not clear from the plugin interface. This commit clarifies this by declaring that Plugins implement java.io.Closeable and adding an empty implementation to the base Plugin class. Relates #21669 --- .../src/main/java/org/elasticsearch/node/Node.java | 2 +- .../java/org/elasticsearch/plugins/Plugin.java | 14 +++++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 298d6712ff0..1268a115de1 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -734,7 +734,7 @@ public class Node implements Closeable { toClose.add(() -> stopWatch.stop().start("plugin(" + plugin.getClass().getName() + ")")); toClose.add(plugin); } - toClose.addAll(pluginsService.filterPlugins(Closeable.class)); + toClose.addAll(pluginsService.filterPlugins(Plugin.class)); toClose.add(() -> stopWatch.stop().start("script")); toClose.add(injector.getInstance(ScriptService.class)); diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 7bb554df9a3..cc548b52f67 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -19,6 +19,8 @@ package org.elasticsearch.plugins; +import java.io.Closeable; +import java.io.IOException; import java.util.Collection; import 
java.util.Collections; import java.util.List; @@ -70,7 +72,7 @@ import java.util.function.UnaryOperator; * methods should cause any extensions of {@linkplain Plugin} that used the pre-5.x style extension syntax to fail to build and point the * plugin author at the new extension syntax. We hope that these make the process of upgrading a plugin from 2.x to 5.x only mildly painful. */ -public abstract class Plugin { +public abstract class Plugin implements Closeable { /** * Node level guice modules. @@ -162,6 +164,16 @@ public abstract class Plugin { return Collections.emptyList(); } + /** + * Close the resources opened by this plugin. + * + * @throws IOException if the plugin failed to close its resources + */ + @Override + public void close() throws IOException { + + } + /** * Old-style guice index level extension point. {@code @Deprecated} and {@code final} to act as a signpost for plugin authors upgrading * from 2.x. From ae468441dcdf3fcfb874fb71e8aaada83f5c2683 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 18 Nov 2016 13:54:26 -0500 Subject: [PATCH 18/50] Implement the ?: operator in painless (#21506) Implements a null coalescing operator in painless that looks like `?:`. This form was chosen to emulate Groovy's `?:` operator. It is different in that it only coalesces null values, instead of Groovy's `?:` operator which coalesces all falsy values. I believe that makes it the same as Kotlin's `?:` operator. In other languages this operator looks like `??` (C#) and `COALESCE` (SQL) and `:-` (bash). This operator is lazy, meaning the right hand side is only evaluated at all if the left hand side is null. 
--- .../scripting/painless-syntax.asciidoc | 15 + .../src/main/antlr/PainlessLexer.g4 | 4 +- .../src/main/antlr/PainlessLexer.tokens | 276 +++--- .../src/main/antlr/PainlessParser.g4 | 5 +- .../src/main/antlr/PainlessParser.tokens | 276 +++--- .../painless/antlr/PainlessLexer.java | 484 ++++----- .../painless/antlr/PainlessParser.java | 926 +++++++++--------- .../antlr/PainlessParserBaseVisitor.java | 7 + .../painless/antlr/PainlessParserVisitor.java | 7 + .../elasticsearch/painless/antlr/Walker.java | 14 +- .../elasticsearch/painless/node/EElvis.java | 108 ++ .../painless/node/PSubNullSafeCallInvoke.java | 8 +- .../painless/node/PSubNullSafeField.java | 8 +- .../painless/BasicExpressionTests.java | 56 +- .../elasticsearch/painless/ElvisTests.java | 143 +++ .../painless/WhenThingsGoWrongTests.java | 6 + .../test/plan_a/20_scriptfield.yaml | 4 +- 17 files changed, 1344 insertions(+), 1003 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/ElvisTests.java diff --git a/docs/reference/modules/scripting/painless-syntax.asciidoc b/docs/reference/modules/scripting/painless-syntax.asciidoc index fa8c1e60aa2..d89e79fc5d4 100644 --- a/docs/reference/modules/scripting/painless-syntax.asciidoc +++ b/docs/reference/modules/scripting/painless-syntax.asciidoc @@ -175,6 +175,21 @@ There are only a few minor differences and add-ons: * `=~` true if a portion of the text matches a pattern (e.g. `x =~ /b/`) * `==~` true if the entire text matches a pattern (e.g. `x ==~ /[Bb]ob/`) +The `?:` (aka Elvis) operator coalesces null values. So `x ?: 0` is `0` if `x` +is `null` and whatever value `x` has otherwise. It is a convenient way to write +default values like `doc['x'].value ?: 0` which is 0 if `x` is not in the +document being processed. It can also work with null safe dereferences to +efficiently handle null in chains. 
For example, +`doc['foo.keyword'].value?.length() ?: 0` is 0 if the document being processed +doesn't have a `foo.keyword` field but is the length of that field if it does. +Lastly, `?:` is lazy so the right hand side is not evaluated at all if the left +hand side isn't null. + +NOTE: Unlike Groovy, Painless' `?:` operator only coalesces `null`, not `false` +or http://groovy-lang.org/semantics.html#Groovy-Truth[falsy] values. Strictly +speaking Painless' `?:` is more like Kotlin's `?:` than Groovy's `?:`. + + [float] [[painless-control-flow]] === Control flow diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index 2f07c5f9a42..9504a3d911a 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -36,7 +36,8 @@ RP: ')'; // between shortcuts and decimal values. Without the mode switch // shortcuts such as id.0.0 will fail because 0.0 will be interpreted // as a decimal value instead of two individual list-style shortcuts. -DOT: '.' -> mode(AFTER_DOT); +DOT: '.' -> mode(AFTER_DOT); +NSDOT: '?.' 
-> mode(AFTER_DOT); COMMA: ','; SEMICOLON: ';'; IF: 'if'; @@ -80,6 +81,7 @@ BOOLAND: '&&'; BOOLOR: '||'; COND: '?'; COLON: ':'; +ELVIS: '?:'; REF: '::'; ARROW: '->'; FIND: '=~'; diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.tokens b/modules/lang-painless/src/main/antlr/PainlessLexer.tokens index dc62fe36b28..90422e09160 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.tokens +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.tokens @@ -7,79 +7,81 @@ RBRACE=6 LP=7 RP=8 DOT=9 -COMMA=10 -SEMICOLON=11 -IF=12 -IN=13 -ELSE=14 -WHILE=15 -DO=16 -FOR=17 -CONTINUE=18 -BREAK=19 -RETURN=20 -NEW=21 -TRY=22 -CATCH=23 -THROW=24 -THIS=25 -INSTANCEOF=26 -BOOLNOT=27 -BWNOT=28 -MUL=29 -DIV=30 -REM=31 -ADD=32 -SUB=33 -LSH=34 -RSH=35 -USH=36 -LT=37 -LTE=38 -GT=39 -GTE=40 -EQ=41 -EQR=42 -NE=43 -NER=44 -BWAND=45 -XOR=46 -BWOR=47 -BOOLAND=48 -BOOLOR=49 -COND=50 -COLON=51 -REF=52 -ARROW=53 -FIND=54 -MATCH=55 -INCR=56 -DECR=57 -ASSIGN=58 -AADD=59 -ASUB=60 -AMUL=61 -ADIV=62 -AREM=63 -AAND=64 -AXOR=65 -AOR=66 -ALSH=67 -ARSH=68 -AUSH=69 -OCTAL=70 -HEX=71 -INTEGER=72 -DECIMAL=73 -STRING=74 -REGEX=75 -TRUE=76 -FALSE=77 -NULL=78 -TYPE=79 -ID=80 -DOTINTEGER=81 -DOTID=82 +NSDOT=10 +COMMA=11 +SEMICOLON=12 +IF=13 +IN=14 +ELSE=15 +WHILE=16 +DO=17 +FOR=18 +CONTINUE=19 +BREAK=20 +RETURN=21 +NEW=22 +TRY=23 +CATCH=24 +THROW=25 +THIS=26 +INSTANCEOF=27 +BOOLNOT=28 +BWNOT=29 +MUL=30 +DIV=31 +REM=32 +ADD=33 +SUB=34 +LSH=35 +RSH=36 +USH=37 +LT=38 +LTE=39 +GT=40 +GTE=41 +EQ=42 +EQR=43 +NE=44 +NER=45 +BWAND=46 +XOR=47 +BWOR=48 +BOOLAND=49 +BOOLOR=50 +COND=51 +COLON=52 +ELVIS=53 +REF=54 +ARROW=55 +FIND=56 +MATCH=57 +INCR=58 +DECR=59 +ASSIGN=60 +AADD=61 +ASUB=62 +AMUL=63 +ADIV=64 +AREM=65 +AAND=66 +AXOR=67 +AOR=68 +ALSH=69 +ARSH=70 +AUSH=71 +OCTAL=72 +HEX=73 +INTEGER=74 +DECIMAL=75 +STRING=76 +REGEX=77 +TRUE=78 +FALSE=79 +NULL=80 +TYPE=81 +ID=82 +DOTINTEGER=83 +DOTID=84 '{'=3 '}'=4 '['=5 @@ -87,66 +89,68 @@ DOTID=82 '('=7 ')'=8 '.'=9 -','=10 -';'=11 -'if'=12 -'in'=13 
-'else'=14 -'while'=15 -'do'=16 -'for'=17 -'continue'=18 -'break'=19 -'return'=20 -'new'=21 -'try'=22 -'catch'=23 -'throw'=24 -'this'=25 -'instanceof'=26 -'!'=27 -'~'=28 -'*'=29 -'/'=30 -'%'=31 -'+'=32 -'-'=33 -'<<'=34 -'>>'=35 -'>>>'=36 -'<'=37 -'<='=38 -'>'=39 -'>='=40 -'=='=41 -'==='=42 -'!='=43 -'!=='=44 -'&'=45 -'^'=46 -'|'=47 -'&&'=48 -'||'=49 -'?'=50 -':'=51 -'::'=52 -'->'=53 -'=~'=54 -'==~'=55 -'++'=56 -'--'=57 -'='=58 -'+='=59 -'-='=60 -'*='=61 -'/='=62 -'%='=63 -'&='=64 -'^='=65 -'|='=66 -'<<='=67 -'>>='=68 -'>>>='=69 -'true'=76 -'false'=77 -'null'=78 +'?.'=10 +','=11 +';'=12 +'if'=13 +'in'=14 +'else'=15 +'while'=16 +'do'=17 +'for'=18 +'continue'=19 +'break'=20 +'return'=21 +'new'=22 +'try'=23 +'catch'=24 +'throw'=25 +'this'=26 +'instanceof'=27 +'!'=28 +'~'=29 +'*'=30 +'/'=31 +'%'=32 +'+'=33 +'-'=34 +'<<'=35 +'>>'=36 +'>>>'=37 +'<'=38 +'<='=39 +'>'=40 +'>='=41 +'=='=42 +'==='=43 +'!='=44 +'!=='=45 +'&'=46 +'^'=47 +'|'=48 +'&&'=49 +'||'=50 +'?'=51 +':'=52 +'?:'=53 +'::'=54 +'->'=55 +'=~'=56 +'==~'=57 +'++'=58 +'--'=59 +'='=60 +'+='=61 +'-='=62 +'*='=63 +'/='=64 +'%='=65 +'&='=66 +'^='=67 +'|='=68 +'<<='=69 +'>>='=70 +'>>>='=71 +'true'=78 +'false'=79 +'null'=80 diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4 index cca96e65b8b..bfa4ee28dcc 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessParser.g4 @@ -110,6 +110,7 @@ expression | expression BOOLAND expression # bool | expression BOOLOR expression # bool | expression COND expression COLON expression # conditional + | expression ELVIS expression # elvis | expression ( ASSIGN | AADD | ASUB | AMUL | ADIV | AREM | AAND | AXOR | AOR | ALSH | ARSH | AUSH ) expression # assignment @@ -156,11 +157,11 @@ postdot ; callinvoke - : COND? DOT DOTID arguments + : ( DOT | NSDOT ) DOTID arguments ; fieldaccess - : COND? 
DOT ( DOTID | DOTINTEGER ) + : ( DOT | NSDOT ) ( DOTID | DOTINTEGER ) ; braceaccess diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.tokens b/modules/lang-painless/src/main/antlr/PainlessParser.tokens index dc62fe36b28..90422e09160 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.tokens +++ b/modules/lang-painless/src/main/antlr/PainlessParser.tokens @@ -7,79 +7,81 @@ RBRACE=6 LP=7 RP=8 DOT=9 -COMMA=10 -SEMICOLON=11 -IF=12 -IN=13 -ELSE=14 -WHILE=15 -DO=16 -FOR=17 -CONTINUE=18 -BREAK=19 -RETURN=20 -NEW=21 -TRY=22 -CATCH=23 -THROW=24 -THIS=25 -INSTANCEOF=26 -BOOLNOT=27 -BWNOT=28 -MUL=29 -DIV=30 -REM=31 -ADD=32 -SUB=33 -LSH=34 -RSH=35 -USH=36 -LT=37 -LTE=38 -GT=39 -GTE=40 -EQ=41 -EQR=42 -NE=43 -NER=44 -BWAND=45 -XOR=46 -BWOR=47 -BOOLAND=48 -BOOLOR=49 -COND=50 -COLON=51 -REF=52 -ARROW=53 -FIND=54 -MATCH=55 -INCR=56 -DECR=57 -ASSIGN=58 -AADD=59 -ASUB=60 -AMUL=61 -ADIV=62 -AREM=63 -AAND=64 -AXOR=65 -AOR=66 -ALSH=67 -ARSH=68 -AUSH=69 -OCTAL=70 -HEX=71 -INTEGER=72 -DECIMAL=73 -STRING=74 -REGEX=75 -TRUE=76 -FALSE=77 -NULL=78 -TYPE=79 -ID=80 -DOTINTEGER=81 -DOTID=82 +NSDOT=10 +COMMA=11 +SEMICOLON=12 +IF=13 +IN=14 +ELSE=15 +WHILE=16 +DO=17 +FOR=18 +CONTINUE=19 +BREAK=20 +RETURN=21 +NEW=22 +TRY=23 +CATCH=24 +THROW=25 +THIS=26 +INSTANCEOF=27 +BOOLNOT=28 +BWNOT=29 +MUL=30 +DIV=31 +REM=32 +ADD=33 +SUB=34 +LSH=35 +RSH=36 +USH=37 +LT=38 +LTE=39 +GT=40 +GTE=41 +EQ=42 +EQR=43 +NE=44 +NER=45 +BWAND=46 +XOR=47 +BWOR=48 +BOOLAND=49 +BOOLOR=50 +COND=51 +COLON=52 +ELVIS=53 +REF=54 +ARROW=55 +FIND=56 +MATCH=57 +INCR=58 +DECR=59 +ASSIGN=60 +AADD=61 +ASUB=62 +AMUL=63 +ADIV=64 +AREM=65 +AAND=66 +AXOR=67 +AOR=68 +ALSH=69 +ARSH=70 +AUSH=71 +OCTAL=72 +HEX=73 +INTEGER=74 +DECIMAL=75 +STRING=76 +REGEX=77 +TRUE=78 +FALSE=79 +NULL=80 +TYPE=81 +ID=82 +DOTINTEGER=83 +DOTID=84 '{'=3 '}'=4 '['=5 @@ -87,66 +89,68 @@ DOTID=82 '('=7 ')'=8 '.'=9 -','=10 -';'=11 -'if'=12 -'in'=13 -'else'=14 -'while'=15 -'do'=16 -'for'=17 -'continue'=18 -'break'=19 -'return'=20 -'new'=21 
-'try'=22 -'catch'=23 -'throw'=24 -'this'=25 -'instanceof'=26 -'!'=27 -'~'=28 -'*'=29 -'/'=30 -'%'=31 -'+'=32 -'-'=33 -'<<'=34 -'>>'=35 -'>>>'=36 -'<'=37 -'<='=38 -'>'=39 -'>='=40 -'=='=41 -'==='=42 -'!='=43 -'!=='=44 -'&'=45 -'^'=46 -'|'=47 -'&&'=48 -'||'=49 -'?'=50 -':'=51 -'::'=52 -'->'=53 -'=~'=54 -'==~'=55 -'++'=56 -'--'=57 -'='=58 -'+='=59 -'-='=60 -'*='=61 -'/='=62 -'%='=63 -'&='=64 -'^='=65 -'|='=66 -'<<='=67 -'>>='=68 -'>>>='=69 -'true'=76 -'false'=77 -'null'=78 +'?.'=10 +','=11 +';'=12 +'if'=13 +'in'=14 +'else'=15 +'while'=16 +'do'=17 +'for'=18 +'continue'=19 +'break'=20 +'return'=21 +'new'=22 +'try'=23 +'catch'=24 +'throw'=25 +'this'=26 +'instanceof'=27 +'!'=28 +'~'=29 +'*'=30 +'/'=31 +'%'=32 +'+'=33 +'-'=34 +'<<'=35 +'>>'=36 +'>>>'=37 +'<'=38 +'<='=39 +'>'=40 +'>='=41 +'=='=42 +'==='=43 +'!='=44 +'!=='=45 +'&'=46 +'^'=47 +'|'=48 +'&&'=49 +'||'=50 +'?'=51 +':'=52 +'?:'=53 +'::'=54 +'->'=55 +'=~'=56 +'==~'=57 +'++'=58 +'--'=59 +'='=60 +'+='=61 +'-='=62 +'*='=63 +'/='=64 +'%='=65 +'&='=66 +'^='=67 +'|='=68 +'<<='=69 +'>>='=70 +'>>>='=71 +'true'=78 +'false'=79 +'null'=80 diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java index 54e164c37c3..529ae6f89ea 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java @@ -21,16 +21,16 @@ class PainlessLexer extends Lexer { new PredictionContextCache(); public static final int WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, IN=13, ELSE=14, WHILE=15, DO=16, FOR=17, - CONTINUE=18, BREAK=19, RETURN=20, NEW=21, TRY=22, CATCH=23, THROW=24, - THIS=25, INSTANCEOF=26, BOOLNOT=27, BWNOT=28, MUL=29, DIV=30, REM=31, - ADD=32, SUB=33, LSH=34, RSH=35, USH=36, LT=37, LTE=38, GT=39, GTE=40, - EQ=41, 
EQR=42, NE=43, NER=44, BWAND=45, XOR=46, BWOR=47, BOOLAND=48, BOOLOR=49, - COND=50, COLON=51, REF=52, ARROW=53, FIND=54, MATCH=55, INCR=56, DECR=57, - ASSIGN=58, AADD=59, ASUB=60, AMUL=61, ADIV=62, AREM=63, AAND=64, AXOR=65, - AOR=66, ALSH=67, ARSH=68, AUSH=69, OCTAL=70, HEX=71, INTEGER=72, DECIMAL=73, - STRING=74, REGEX=75, TRUE=76, FALSE=77, NULL=78, TYPE=79, ID=80, DOTINTEGER=81, - DOTID=82; + NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, + FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, + THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, + ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, + EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, + COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, + DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, + AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, + DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, TYPE=81, + ID=82, DOTINTEGER=83, DOTID=84; public static final int AFTER_DOT = 1; public static String[] modeNames = { "DEFAULT_MODE", "AFTER_DOT" @@ -38,39 +38,39 @@ class PainlessLexer extends Lexer { public static final String[] ruleNames = { "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", - "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", "INSTANCEOF", - "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", - "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", - "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", "MATCH", - "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", - "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", "DECIMAL", - "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", 
"DOTINTEGER", - "DOTID" - }; - - private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", - "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", - "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", - "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", - "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'::'", "'->'", - "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", - "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, - null, null, null, null, "'true'", "'false'", "'null'" - }; - private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", + "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", - "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", - "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", - "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", - "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", - "DOTID" + "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", "REF", "ARROW", + "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", + "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", + "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", + "ID", "DOTINTEGER", "DOTID" + }; + + private static final String[] _LITERAL_NAMES = { + null, null, null, "'{'", "'}'", 
"'['", "']'", "'('", "')'", "'.'", "'?.'", + "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", + "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", + "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", + "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", + "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", + "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", + "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, + null, null, null, null, null, "'true'", "'false'", "'null'" + }; + private static final String[] _SYMBOLIC_NAMES = { + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", + "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", + "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", + "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", + "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", + "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", + "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", + "NULL", "TYPE", "ID", "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -129,11 +129,11 @@ class PainlessLexer extends Lexer { @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 29: + case 30: return DIV_sempred((RuleContext)_localctx, predIndex); - case 74: + case 76: return REGEX_sempred((RuleContext)_localctx, predIndex); - case 78: + case 80: return TYPE_sempred((RuleContext)_localctx, predIndex); } return true; @@ -161,7 +161,7 @@ class PainlessLexer extends Lexer { } 
public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2T\u024b\b\1\b\1\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2V\u0257\b\1\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -170,202 +170,206 @@ class PainlessLexer extends Lexer { "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ - "I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\3\2\6"+ - "\2\u00aa\n\2\r\2\16\2\u00ab\3\2\3\2\3\3\3\3\3\3\3\3\7\3\u00b4\n\3\f\3"+ - "\16\3\u00b7\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00be\n\3\f\3\16\3\u00c1\13\3"+ - "\3\3\3\3\5\3\u00c5\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b"+ - "\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16"+ - "\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21"+ - "\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24"+ - "\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26"+ - "\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31"+ - "\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33"+ - "\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37"+ - "\3\37\3 \3 \3!\3!\3\"\3\"\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%\3&\3&\3\'\3\'"+ - "\3\'\3(\3(\3)\3)\3)\3*\3*\3*\3+\3+\3+\3+\3,\3,\3,\3-\3-\3-\3-\3.\3.\3"+ - "/\3/\3\60\3\60\3\61\3\61\3\61\3\62\3\62\3\62\3\63\3\63\3\64\3\64\3\65"+ - "\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\38\38\38\39\39\39\3:\3:\3"+ - ":\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3?\3?\3?\3@\3@\3@\3A\3A\3A\3B\3B\3"+ - "B\3C\3C\3C\3D\3D\3D\3D\3E\3E\3E\3E\3F\3F\3F\3F\3F\3G\3G\6G\u01ac\nG\r"+ - 
"G\16G\u01ad\3G\5G\u01b1\nG\3H\3H\3H\6H\u01b6\nH\rH\16H\u01b7\3H\5H\u01bb"+ - "\nH\3I\3I\3I\7I\u01c0\nI\fI\16I\u01c3\13I\5I\u01c5\nI\3I\5I\u01c8\nI\3"+ - "J\3J\3J\7J\u01cd\nJ\fJ\16J\u01d0\13J\5J\u01d2\nJ\3J\3J\6J\u01d6\nJ\rJ"+ - "\16J\u01d7\5J\u01da\nJ\3J\3J\5J\u01de\nJ\3J\6J\u01e1\nJ\rJ\16J\u01e2\5"+ - "J\u01e5\nJ\3J\5J\u01e8\nJ\3K\3K\3K\3K\3K\3K\7K\u01f0\nK\fK\16K\u01f3\13"+ - "K\3K\3K\3K\3K\3K\3K\3K\7K\u01fc\nK\fK\16K\u01ff\13K\3K\5K\u0202\nK\3L"+ - "\3L\3L\3L\6L\u0208\nL\rL\16L\u0209\3L\3L\7L\u020e\nL\fL\16L\u0211\13L"+ - "\3L\3L\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3O\3O\3O\3O\3O\3P\3P\3P\3P\7P"+ - "\u0229\nP\fP\16P\u022c\13P\3P\3P\3Q\3Q\7Q\u0232\nQ\fQ\16Q\u0235\13Q\3"+ - "R\3R\3R\7R\u023a\nR\fR\16R\u023d\13R\5R\u023f\nR\3R\3R\3S\3S\7S\u0245"+ - "\nS\fS\16S\u0248\13S\3S\3S\6\u00b5\u00bf\u01f1\u01fd\2T\4\3\6\4\b\5\n"+ - "\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21\"\22$\23&"+ - "\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!B\"D#F$H%J"+ - "&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:t;v|?"+ - "~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090I\u0092"+ - "J\u0094K\u0096L\u0098M\u009aN\u009cO\u009eP\u00a0Q\u00a2R\u00a4S\u00a6"+ - "T\4\2\3\24\5\2\13\f\17\17\"\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5"+ - "\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4\2GGgg\4\2--//\6\2FFHHffh"+ - "h\4\2$$^^\4\2\f\f\61\61\3\2\f\f\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C"+ - "\\aac|\u026b\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2"+ - "\2\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2"+ - "\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2"+ - "\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2"+ - "\2\60\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2"+ - "\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2"+ - "\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T"+ - 
"\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3"+ - "\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2"+ - "\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2"+ - "z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084"+ - "\3\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2"+ - "\2\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096"+ - "\3\2\2\2\2\u0098\3\2\2\2\2\u009a\3\2\2\2\2\u009c\3\2\2\2\2\u009e\3\2\2"+ - "\2\2\u00a0\3\2\2\2\2\u00a2\3\2\2\2\3\u00a4\3\2\2\2\3\u00a6\3\2\2\2\4\u00a9"+ - "\3\2\2\2\6\u00c4\3\2\2\2\b\u00c8\3\2\2\2\n\u00ca\3\2\2\2\f\u00cc\3\2\2"+ - "\2\16\u00ce\3\2\2\2\20\u00d0\3\2\2\2\22\u00d2\3\2\2\2\24\u00d4\3\2\2\2"+ - "\26\u00d8\3\2\2\2\30\u00da\3\2\2\2\32\u00dc\3\2\2\2\34\u00df\3\2\2\2\36"+ - "\u00e2\3\2\2\2 \u00e7\3\2\2\2\"\u00ed\3\2\2\2$\u00f0\3\2\2\2&\u00f4\3"+ - "\2\2\2(\u00fd\3\2\2\2*\u0103\3\2\2\2,\u010a\3\2\2\2.\u010e\3\2\2\2\60"+ - "\u0112\3\2\2\2\62\u0118\3\2\2\2\64\u011e\3\2\2\2\66\u0123\3\2\2\28\u012e"+ - "\3\2\2\2:\u0130\3\2\2\2<\u0132\3\2\2\2>\u0134\3\2\2\2@\u0137\3\2\2\2B"+ - "\u0139\3\2\2\2D\u013b\3\2\2\2F\u013d\3\2\2\2H\u0140\3\2\2\2J\u0143\3\2"+ - "\2\2L\u0147\3\2\2\2N\u0149\3\2\2\2P\u014c\3\2\2\2R\u014e\3\2\2\2T\u0151"+ - "\3\2\2\2V\u0154\3\2\2\2X\u0158\3\2\2\2Z\u015b\3\2\2\2\\\u015f\3\2\2\2"+ - "^\u0161\3\2\2\2`\u0163\3\2\2\2b\u0165\3\2\2\2d\u0168\3\2\2\2f\u016b\3"+ - "\2\2\2h\u016d\3\2\2\2j\u016f\3\2\2\2l\u0172\3\2\2\2n\u0175\3\2\2\2p\u0178"+ - "\3\2\2\2r\u017c\3\2\2\2t\u017f\3\2\2\2v\u0182\3\2\2\2x\u0184\3\2\2\2z"+ - "\u0187\3\2\2\2|\u018a\3\2\2\2~\u018d\3\2\2\2\u0080\u0190\3\2\2\2\u0082"+ - "\u0193\3\2\2\2\u0084\u0196\3\2\2\2\u0086\u0199\3\2\2\2\u0088\u019c\3\2"+ - "\2\2\u008a\u01a0\3\2\2\2\u008c\u01a4\3\2\2\2\u008e\u01a9\3\2\2\2\u0090"+ - "\u01b2\3\2\2\2\u0092\u01c4\3\2\2\2\u0094\u01d1\3\2\2\2\u0096\u0201\3\2"+ - "\2\2\u0098\u0203\3\2\2\2\u009a\u0214\3\2\2\2\u009c\u0219\3\2\2\2\u009e"+ - 
"\u021f\3\2\2\2\u00a0\u0224\3\2\2\2\u00a2\u022f\3\2\2\2\u00a4\u023e\3\2"+ - "\2\2\u00a6\u0242\3\2\2\2\u00a8\u00aa\t\2\2\2\u00a9\u00a8\3\2\2\2\u00aa"+ - "\u00ab\3\2\2\2\u00ab\u00a9\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2"+ - "\2\2\u00ad\u00ae\b\2\2\2\u00ae\5\3\2\2\2\u00af\u00b0\7\61\2\2\u00b0\u00b1"+ - "\7\61\2\2\u00b1\u00b5\3\2\2\2\u00b2\u00b4\13\2\2\2\u00b3\u00b2\3\2\2\2"+ - "\u00b4\u00b7\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b5\u00b3\3\2\2\2\u00b6\u00b8"+ - "\3\2\2\2\u00b7\u00b5\3\2\2\2\u00b8\u00c5\t\3\2\2\u00b9\u00ba\7\61\2\2"+ - "\u00ba\u00bb\7,\2\2\u00bb\u00bf\3\2\2\2\u00bc\u00be\13\2\2\2\u00bd\u00bc"+ - "\3\2\2\2\u00be\u00c1\3\2\2\2\u00bf\u00c0\3\2\2\2\u00bf\u00bd\3\2\2\2\u00c0"+ - "\u00c2\3\2\2\2\u00c1\u00bf\3\2\2\2\u00c2\u00c3\7,\2\2\u00c3\u00c5\7\61"+ - "\2\2\u00c4\u00af\3\2\2\2\u00c4\u00b9\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6"+ - "\u00c7\b\3\2\2\u00c7\7\3\2\2\2\u00c8\u00c9\7}\2\2\u00c9\t\3\2\2\2\u00ca"+ - "\u00cb\7\177\2\2\u00cb\13\3\2\2\2\u00cc\u00cd\7]\2\2\u00cd\r\3\2\2\2\u00ce"+ - "\u00cf\7_\2\2\u00cf\17\3\2\2\2\u00d0\u00d1\7*\2\2\u00d1\21\3\2\2\2\u00d2"+ - "\u00d3\7+\2\2\u00d3\23\3\2\2\2\u00d4\u00d5\7\60\2\2\u00d5\u00d6\3\2\2"+ - "\2\u00d6\u00d7\b\n\3\2\u00d7\25\3\2\2\2\u00d8\u00d9\7.\2\2\u00d9\27\3"+ - "\2\2\2\u00da\u00db\7=\2\2\u00db\31\3\2\2\2\u00dc\u00dd\7k\2\2\u00dd\u00de"+ - "\7h\2\2\u00de\33\3\2\2\2\u00df\u00e0\7k\2\2\u00e0\u00e1\7p\2\2\u00e1\35"+ - "\3\2\2\2\u00e2\u00e3\7g\2\2\u00e3\u00e4\7n\2\2\u00e4\u00e5\7u\2\2\u00e5"+ - "\u00e6\7g\2\2\u00e6\37\3\2\2\2\u00e7\u00e8\7y\2\2\u00e8\u00e9\7j\2\2\u00e9"+ - "\u00ea\7k\2\2\u00ea\u00eb\7n\2\2\u00eb\u00ec\7g\2\2\u00ec!\3\2\2\2\u00ed"+ - "\u00ee\7f\2\2\u00ee\u00ef\7q\2\2\u00ef#\3\2\2\2\u00f0\u00f1\7h\2\2\u00f1"+ - "\u00f2\7q\2\2\u00f2\u00f3\7t\2\2\u00f3%\3\2\2\2\u00f4\u00f5\7e\2\2\u00f5"+ - "\u00f6\7q\2\2\u00f6\u00f7\7p\2\2\u00f7\u00f8\7v\2\2\u00f8\u00f9\7k\2\2"+ - "\u00f9\u00fa\7p\2\2\u00fa\u00fb\7w\2\2\u00fb\u00fc\7g\2\2\u00fc\'\3\2"+ - 
"\2\2\u00fd\u00fe\7d\2\2\u00fe\u00ff\7t\2\2\u00ff\u0100\7g\2\2\u0100\u0101"+ - "\7c\2\2\u0101\u0102\7m\2\2\u0102)\3\2\2\2\u0103\u0104\7t\2\2\u0104\u0105"+ - "\7g\2\2\u0105\u0106\7v\2\2\u0106\u0107\7w\2\2\u0107\u0108\7t\2\2\u0108"+ - "\u0109\7p\2\2\u0109+\3\2\2\2\u010a\u010b\7p\2\2\u010b\u010c\7g\2\2\u010c"+ - "\u010d\7y\2\2\u010d-\3\2\2\2\u010e\u010f\7v\2\2\u010f\u0110\7t\2\2\u0110"+ - "\u0111\7{\2\2\u0111/\3\2\2\2\u0112\u0113\7e\2\2\u0113\u0114\7c\2\2\u0114"+ - "\u0115\7v\2\2\u0115\u0116\7e\2\2\u0116\u0117\7j\2\2\u0117\61\3\2\2\2\u0118"+ - "\u0119\7v\2\2\u0119\u011a\7j\2\2\u011a\u011b\7t\2\2\u011b\u011c\7q\2\2"+ - "\u011c\u011d\7y\2\2\u011d\63\3\2\2\2\u011e\u011f\7v\2\2\u011f\u0120\7"+ - "j\2\2\u0120\u0121\7k\2\2\u0121\u0122\7u\2\2\u0122\65\3\2\2\2\u0123\u0124"+ - "\7k\2\2\u0124\u0125\7p\2\2\u0125\u0126\7u\2\2\u0126\u0127\7v\2\2\u0127"+ - "\u0128\7c\2\2\u0128\u0129\7p\2\2\u0129\u012a\7e\2\2\u012a\u012b\7g\2\2"+ - "\u012b\u012c\7q\2\2\u012c\u012d\7h\2\2\u012d\67\3\2\2\2\u012e\u012f\7"+ - "#\2\2\u012f9\3\2\2\2\u0130\u0131\7\u0080\2\2\u0131;\3\2\2\2\u0132\u0133"+ - "\7,\2\2\u0133=\3\2\2\2\u0134\u0135\7\61\2\2\u0135\u0136\6\37\2\2\u0136"+ - "?\3\2\2\2\u0137\u0138\7\'\2\2\u0138A\3\2\2\2\u0139\u013a\7-\2\2\u013a"+ - "C\3\2\2\2\u013b\u013c\7/\2\2\u013cE\3\2\2\2\u013d\u013e\7>\2\2\u013e\u013f"+ - "\7>\2\2\u013fG\3\2\2\2\u0140\u0141\7@\2\2\u0141\u0142\7@\2\2\u0142I\3"+ - "\2\2\2\u0143\u0144\7@\2\2\u0144\u0145\7@\2\2\u0145\u0146\7@\2\2\u0146"+ - "K\3\2\2\2\u0147\u0148\7>\2\2\u0148M\3\2\2\2\u0149\u014a\7>\2\2\u014a\u014b"+ - "\7?\2\2\u014bO\3\2\2\2\u014c\u014d\7@\2\2\u014dQ\3\2\2\2\u014e\u014f\7"+ - "@\2\2\u014f\u0150\7?\2\2\u0150S\3\2\2\2\u0151\u0152\7?\2\2\u0152\u0153"+ - "\7?\2\2\u0153U\3\2\2\2\u0154\u0155\7?\2\2\u0155\u0156\7?\2\2\u0156\u0157"+ - "\7?\2\2\u0157W\3\2\2\2\u0158\u0159\7#\2\2\u0159\u015a\7?\2\2\u015aY\3"+ - "\2\2\2\u015b\u015c\7#\2\2\u015c\u015d\7?\2\2\u015d\u015e\7?\2\2\u015e"+ - "[\3\2\2\2\u015f\u0160\7(\2\2\u0160]\3\2\2\2\u0161\u0162\7`\2\2\u0162_"+ - 
"\3\2\2\2\u0163\u0164\7~\2\2\u0164a\3\2\2\2\u0165\u0166\7(\2\2\u0166\u0167"+ - "\7(\2\2\u0167c\3\2\2\2\u0168\u0169\7~\2\2\u0169\u016a\7~\2\2\u016ae\3"+ - "\2\2\2\u016b\u016c\7A\2\2\u016cg\3\2\2\2\u016d\u016e\7<\2\2\u016ei\3\2"+ - "\2\2\u016f\u0170\7<\2\2\u0170\u0171\7<\2\2\u0171k\3\2\2\2\u0172\u0173"+ - "\7/\2\2\u0173\u0174\7@\2\2\u0174m\3\2\2\2\u0175\u0176\7?\2\2\u0176\u0177"+ - "\7\u0080\2\2\u0177o\3\2\2\2\u0178\u0179\7?\2\2\u0179\u017a\7?\2\2\u017a"+ - "\u017b\7\u0080\2\2\u017bq\3\2\2\2\u017c\u017d\7-\2\2\u017d\u017e\7-\2"+ - "\2\u017es\3\2\2\2\u017f\u0180\7/\2\2\u0180\u0181\7/\2\2\u0181u\3\2\2\2"+ - "\u0182\u0183\7?\2\2\u0183w\3\2\2\2\u0184\u0185\7-\2\2\u0185\u0186\7?\2"+ - "\2\u0186y\3\2\2\2\u0187\u0188\7/\2\2\u0188\u0189\7?\2\2\u0189{\3\2\2\2"+ - "\u018a\u018b\7,\2\2\u018b\u018c\7?\2\2\u018c}\3\2\2\2\u018d\u018e\7\61"+ - "\2\2\u018e\u018f\7?\2\2\u018f\177\3\2\2\2\u0190\u0191\7\'\2\2\u0191\u0192"+ - "\7?\2\2\u0192\u0081\3\2\2\2\u0193\u0194\7(\2\2\u0194\u0195\7?\2\2\u0195"+ - "\u0083\3\2\2\2\u0196\u0197\7`\2\2\u0197\u0198\7?\2\2\u0198\u0085\3\2\2"+ - "\2\u0199\u019a\7~\2\2\u019a\u019b\7?\2\2\u019b\u0087\3\2\2\2\u019c\u019d"+ - "\7>\2\2\u019d\u019e\7>\2\2\u019e\u019f\7?\2\2\u019f\u0089\3\2\2\2\u01a0"+ - "\u01a1\7@\2\2\u01a1\u01a2\7@\2\2\u01a2\u01a3\7?\2\2\u01a3\u008b\3\2\2"+ - "\2\u01a4\u01a5\7@\2\2\u01a5\u01a6\7@\2\2\u01a6\u01a7\7@\2\2\u01a7\u01a8"+ - "\7?\2\2\u01a8\u008d\3\2\2\2\u01a9\u01ab\7\62\2\2\u01aa\u01ac\t\4\2\2\u01ab"+ - "\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01ab\3\2\2\2\u01ad\u01ae\3\2"+ - "\2\2\u01ae\u01b0\3\2\2\2\u01af\u01b1\t\5\2\2\u01b0\u01af\3\2\2\2\u01b0"+ - "\u01b1\3\2\2\2\u01b1\u008f\3\2\2\2\u01b2\u01b3\7\62\2\2\u01b3\u01b5\t"+ - "\6\2\2\u01b4\u01b6\t\7\2\2\u01b5\u01b4\3\2\2\2\u01b6\u01b7\3\2\2\2\u01b7"+ - "\u01b5\3\2\2\2\u01b7\u01b8\3\2\2\2\u01b8\u01ba\3\2\2\2\u01b9\u01bb\t\5"+ - "\2\2\u01ba\u01b9\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u0091\3\2\2\2\u01bc"+ - "\u01c5\7\62\2\2\u01bd\u01c1\t\b\2\2\u01be\u01c0\t\t\2\2\u01bf\u01be\3"+ - 
"\2\2\2\u01c0\u01c3\3\2\2\2\u01c1\u01bf\3\2\2\2\u01c1\u01c2\3\2\2\2\u01c2"+ - "\u01c5\3\2\2\2\u01c3\u01c1\3\2\2\2\u01c4\u01bc\3\2\2\2\u01c4\u01bd\3\2"+ - "\2\2\u01c5\u01c7\3\2\2\2\u01c6\u01c8\t\n\2\2\u01c7\u01c6\3\2\2\2\u01c7"+ - "\u01c8\3\2\2\2\u01c8\u0093\3\2\2\2\u01c9\u01d2\7\62\2\2\u01ca\u01ce\t"+ - "\b\2\2\u01cb\u01cd\t\t\2\2\u01cc\u01cb\3\2\2\2\u01cd\u01d0\3\2\2\2\u01ce"+ - "\u01cc\3\2\2\2\u01ce\u01cf\3\2\2\2\u01cf\u01d2\3\2\2\2\u01d0\u01ce\3\2"+ - "\2\2\u01d1\u01c9\3\2\2\2\u01d1\u01ca\3\2\2\2\u01d2\u01d9\3\2\2\2\u01d3"+ - "\u01d5\5\24\n\2\u01d4\u01d6\t\t\2\2\u01d5\u01d4\3\2\2\2\u01d6\u01d7\3"+ - "\2\2\2\u01d7\u01d5\3\2\2\2\u01d7\u01d8\3\2\2\2\u01d8\u01da\3\2\2\2\u01d9"+ - "\u01d3\3\2\2\2\u01d9\u01da\3\2\2\2\u01da\u01e4\3\2\2\2\u01db\u01dd\t\13"+ - "\2\2\u01dc\u01de\t\f\2\2\u01dd\u01dc\3\2\2\2\u01dd\u01de\3\2\2\2\u01de"+ - "\u01e0\3\2\2\2\u01df\u01e1\t\t\2\2\u01e0\u01df\3\2\2\2\u01e1\u01e2\3\2"+ - "\2\2\u01e2\u01e0\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3\u01e5\3\2\2\2\u01e4"+ - "\u01db\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e7\3\2\2\2\u01e6\u01e8\t\r"+ - "\2\2\u01e7\u01e6\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\u0095\3\2\2\2\u01e9"+ - "\u01f1\7$\2\2\u01ea\u01eb\7^\2\2\u01eb\u01f0\7$\2\2\u01ec\u01ed\7^\2\2"+ - "\u01ed\u01f0\7^\2\2\u01ee\u01f0\n\16\2\2\u01ef\u01ea\3\2\2\2\u01ef\u01ec"+ - "\3\2\2\2\u01ef\u01ee\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01f2\3\2\2\2\u01f1"+ - "\u01ef\3\2\2\2\u01f2\u01f4\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f4\u0202\7$"+ - "\2\2\u01f5\u01fd\7)\2\2\u01f6\u01f7\7^\2\2\u01f7\u01fc\7)\2\2\u01f8\u01f9"+ - "\7^\2\2\u01f9\u01fc\7^\2\2\u01fa\u01fc\n\16\2\2\u01fb\u01f6\3\2\2\2\u01fb"+ - "\u01f8\3\2\2\2\u01fb\u01fa\3\2\2\2\u01fc\u01ff\3\2\2\2\u01fd\u01fe\3\2"+ - "\2\2\u01fd\u01fb\3\2\2\2\u01fe\u0200\3\2\2\2\u01ff\u01fd\3\2\2\2\u0200"+ - "\u0202\7)\2\2\u0201\u01e9\3\2\2\2\u0201\u01f5\3\2\2\2\u0202\u0097\3\2"+ - "\2\2\u0203\u0207\7\61\2\2\u0204\u0208\n\17\2\2\u0205\u0206\7^\2\2\u0206"+ - "\u0208\n\20\2\2\u0207\u0204\3\2\2\2\u0207\u0205\3\2\2\2\u0208\u0209\3"+ - 
"\2\2\2\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020a\u020b\3\2\2\2\u020b"+ - "\u020f\7\61\2\2\u020c\u020e\t\21\2\2\u020d\u020c\3\2\2\2\u020e\u0211\3"+ - "\2\2\2\u020f\u020d\3\2\2\2\u020f\u0210\3\2\2\2\u0210\u0212\3\2\2\2\u0211"+ - "\u020f\3\2\2\2\u0212\u0213\6L\3\2\u0213\u0099\3\2\2\2\u0214\u0215\7v\2"+ - "\2\u0215\u0216\7t\2\2\u0216\u0217\7w\2\2\u0217\u0218\7g\2\2\u0218\u009b"+ - "\3\2\2\2\u0219\u021a\7h\2\2\u021a\u021b\7c\2\2\u021b\u021c\7n\2\2\u021c"+ - "\u021d\7u\2\2\u021d\u021e\7g\2\2\u021e\u009d\3\2\2\2\u021f\u0220\7p\2"+ - "\2\u0220\u0221\7w\2\2\u0221\u0222\7n\2\2\u0222\u0223\7n\2\2\u0223\u009f"+ - "\3\2\2\2\u0224\u022a\5\u00a2Q\2\u0225\u0226\5\24\n\2\u0226\u0227\5\u00a2"+ - "Q\2\u0227\u0229\3\2\2\2\u0228\u0225\3\2\2\2\u0229\u022c\3\2\2\2\u022a"+ - "\u0228\3\2\2\2\u022a\u022b\3\2\2\2\u022b\u022d\3\2\2\2\u022c\u022a\3\2"+ - "\2\2\u022d\u022e\6P\4\2\u022e\u00a1\3\2\2\2\u022f\u0233\t\22\2\2\u0230"+ - "\u0232\t\23\2\2\u0231\u0230\3\2\2\2\u0232\u0235\3\2\2\2\u0233\u0231\3"+ - "\2\2\2\u0233\u0234\3\2\2\2\u0234\u00a3\3\2\2\2\u0235\u0233\3\2\2\2\u0236"+ - "\u023f\7\62\2\2\u0237\u023b\t\b\2\2\u0238\u023a\t\t\2\2\u0239\u0238\3"+ - "\2\2\2\u023a\u023d\3\2\2\2\u023b\u0239\3\2\2\2\u023b\u023c\3\2\2\2\u023c"+ - "\u023f\3\2\2\2\u023d\u023b\3\2\2\2\u023e\u0236\3\2\2\2\u023e\u0237\3\2"+ - "\2\2\u023f\u0240\3\2\2\2\u0240\u0241\bR\4\2\u0241\u00a5\3\2\2\2\u0242"+ - "\u0246\t\22\2\2\u0243\u0245\t\23\2\2\u0244\u0243\3\2\2\2\u0245\u0248\3"+ - "\2\2\2\u0246\u0244\3\2\2\2\u0246\u0247\3\2\2\2\u0247\u0249\3\2\2\2\u0248"+ - "\u0246\3\2\2\2\u0249\u024a\bS\4\2\u024a\u00a7\3\2\2\2$\2\3\u00ab\u00b5"+ - "\u00bf\u00c4\u01ad\u01b0\u01b7\u01ba\u01c1\u01c4\u01c7\u01ce\u01d1\u01d7"+ - "\u01d9\u01dd\u01e2\u01e4\u01e7\u01ef\u01f1\u01fb\u01fd\u0201\u0207\u0209"+ - "\u020f\u022a\u0233\u023b\u023e\u0246\5\b\2\2\4\3\2\4\2\2"; + "I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\t"+ + "T\4U\tU\3\2\6\2\u00ae\n\2\r\2\16\2\u00af\3\2\3\2\3\3\3\3\3\3\3\3\7\3\u00b8"+ + 
"\n\3\f\3\16\3\u00bb\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00c2\n\3\f\3\16\3\u00c5"+ + "\13\3\3\3\3\3\5\3\u00c9\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3"+ + "\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\f\3\f\3\r\3"+ + "\r\3\16\3\16\3\16\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3"+ + "\21\3\21\3\21\3\21\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3"+ + "\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3"+ + "\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3"+ + "\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3"+ + "\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3"+ + "\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3\"\3\"\3#\3#\3$\3$\3$\3%\3%\3"+ + "%\3&\3&\3&\3&\3\'\3\'\3(\3(\3(\3)\3)\3*\3*\3*\3+\3+\3+\3,\3,\3,\3,\3-"+ + "\3-\3-\3.\3.\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\63\3\63"+ + "\3\63\3\64\3\64\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\38\38\39\3"+ + "9\39\3:\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3>\3>\3>\3?\3?\3?\3@\3@\3@\3"+ + "A\3A\3A\3B\3B\3B\3C\3C\3C\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3G\3G\3G\3G\3"+ + "H\3H\3H\3H\3H\3I\3I\6I\u01b8\nI\rI\16I\u01b9\3I\5I\u01bd\nI\3J\3J\3J\6"+ + "J\u01c2\nJ\rJ\16J\u01c3\3J\5J\u01c7\nJ\3K\3K\3K\7K\u01cc\nK\fK\16K\u01cf"+ + "\13K\5K\u01d1\nK\3K\5K\u01d4\nK\3L\3L\3L\7L\u01d9\nL\fL\16L\u01dc\13L"+ + "\5L\u01de\nL\3L\3L\6L\u01e2\nL\rL\16L\u01e3\5L\u01e6\nL\3L\3L\5L\u01ea"+ + "\nL\3L\6L\u01ed\nL\rL\16L\u01ee\5L\u01f1\nL\3L\5L\u01f4\nL\3M\3M\3M\3"+ + "M\3M\3M\7M\u01fc\nM\fM\16M\u01ff\13M\3M\3M\3M\3M\3M\3M\3M\7M\u0208\nM"+ + "\fM\16M\u020b\13M\3M\5M\u020e\nM\3N\3N\3N\3N\6N\u0214\nN\rN\16N\u0215"+ + "\3N\3N\7N\u021a\nN\fN\16N\u021d\13N\3N\3N\3O\3O\3O\3O\3O\3P\3P\3P\3P\3"+ + "P\3P\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\7R\u0235\nR\fR\16R\u0238\13R\3R\3R\3S"+ + "\3S\7S\u023e\nS\fS\16S\u0241\13S\3T\3T\3T\7T\u0246\nT\fT\16T\u0249\13"+ + "T\5T\u024b\nT\3T\3T\3U\3U\7U\u0251\nU\fU\16U\u0254\13U\3U\3U\6\u00b9\u00c3"+ + 
"\u01fd\u0209\2V\4\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32"+ + "\16\34\17\36\20 \21\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66"+ + "\348\35:\36<\37> @!B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64"+ + "h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008a"+ + "F\u008cG\u008eH\u0090I\u0092J\u0094K\u0096L\u0098M\u009aN\u009cO\u009e"+ + "P\u00a0Q\u00a2R\u00a4S\u00a6T\u00a8U\u00aaV\4\2\3\24\5\2\13\f\17\17\""+ + "\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b"+ + "\2FFHHNNffhhnn\4\2GGgg\4\2--//\6\2FFHHffhh\4\2$$^^\4\2\f\f\61\61\3\2\f"+ + "\f\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac|\u0277\2\4\3\2\2\2\2\6\3"+ + "\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20\3\2\2\2\2"+ + "\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2\2\2\2\34\3"+ + "\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3"+ + "\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3\2\2\2\2\64"+ + "\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3"+ + "\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2"+ + "\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2"+ + "Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3"+ + "\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2"+ + "\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2"+ + "\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2\2\2\2\u0088"+ + "\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2\u0090\3\2\2"+ + "\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096\3\2\2\2\2\u0098\3\2\2\2\2\u009a"+ + "\3\2\2\2\2\u009c\3\2\2\2\2\u009e\3\2\2\2\2\u00a0\3\2\2\2\2\u00a2\3\2\2"+ + "\2\2\u00a4\3\2\2\2\2\u00a6\3\2\2\2\3\u00a8\3\2\2\2\3\u00aa\3\2\2\2\4\u00ad"+ + "\3\2\2\2\6\u00c8\3\2\2\2\b\u00cc\3\2\2\2\n\u00ce\3\2\2\2\f\u00d0\3\2\2"+ + "\2\16\u00d2\3\2\2\2\20\u00d4\3\2\2\2\22\u00d6\3\2\2\2\24\u00d8\3\2\2\2"+ + 
"\26\u00dc\3\2\2\2\30\u00e1\3\2\2\2\32\u00e3\3\2\2\2\34\u00e5\3\2\2\2\36"+ + "\u00e8\3\2\2\2 \u00eb\3\2\2\2\"\u00f0\3\2\2\2$\u00f6\3\2\2\2&\u00f9\3"+ + "\2\2\2(\u00fd\3\2\2\2*\u0106\3\2\2\2,\u010c\3\2\2\2.\u0113\3\2\2\2\60"+ + "\u0117\3\2\2\2\62\u011b\3\2\2\2\64\u0121\3\2\2\2\66\u0127\3\2\2\28\u012c"+ + "\3\2\2\2:\u0137\3\2\2\2<\u0139\3\2\2\2>\u013b\3\2\2\2@\u013d\3\2\2\2B"+ + "\u0140\3\2\2\2D\u0142\3\2\2\2F\u0144\3\2\2\2H\u0146\3\2\2\2J\u0149\3\2"+ + "\2\2L\u014c\3\2\2\2N\u0150\3\2\2\2P\u0152\3\2\2\2R\u0155\3\2\2\2T\u0157"+ + "\3\2\2\2V\u015a\3\2\2\2X\u015d\3\2\2\2Z\u0161\3\2\2\2\\\u0164\3\2\2\2"+ + "^\u0168\3\2\2\2`\u016a\3\2\2\2b\u016c\3\2\2\2d\u016e\3\2\2\2f\u0171\3"+ + "\2\2\2h\u0174\3\2\2\2j\u0176\3\2\2\2l\u0178\3\2\2\2n\u017b\3\2\2\2p\u017e"+ + "\3\2\2\2r\u0181\3\2\2\2t\u0184\3\2\2\2v\u0188\3\2\2\2x\u018b\3\2\2\2z"+ + "\u018e\3\2\2\2|\u0190\3\2\2\2~\u0193\3\2\2\2\u0080\u0196\3\2\2\2\u0082"+ + "\u0199\3\2\2\2\u0084\u019c\3\2\2\2\u0086\u019f\3\2\2\2\u0088\u01a2\3\2"+ + "\2\2\u008a\u01a5\3\2\2\2\u008c\u01a8\3\2\2\2\u008e\u01ac\3\2\2\2\u0090"+ + "\u01b0\3\2\2\2\u0092\u01b5\3\2\2\2\u0094\u01be\3\2\2\2\u0096\u01d0\3\2"+ + "\2\2\u0098\u01dd\3\2\2\2\u009a\u020d\3\2\2\2\u009c\u020f\3\2\2\2\u009e"+ + "\u0220\3\2\2\2\u00a0\u0225\3\2\2\2\u00a2\u022b\3\2\2\2\u00a4\u0230\3\2"+ + "\2\2\u00a6\u023b\3\2\2\2\u00a8\u024a\3\2\2\2\u00aa\u024e\3\2\2\2\u00ac"+ + "\u00ae\t\2\2\2\u00ad\u00ac\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00ad\3\2"+ + "\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00b2\b\2\2\2\u00b2"+ + "\5\3\2\2\2\u00b3\u00b4\7\61\2\2\u00b4\u00b5\7\61\2\2\u00b5\u00b9\3\2\2"+ + "\2\u00b6\u00b8\13\2\2\2\u00b7\u00b6\3\2\2\2\u00b8\u00bb\3\2\2\2\u00b9"+ + "\u00ba\3\2\2\2\u00b9\u00b7\3\2\2\2\u00ba\u00bc\3\2\2\2\u00bb\u00b9\3\2"+ + "\2\2\u00bc\u00c9\t\3\2\2\u00bd\u00be\7\61\2\2\u00be\u00bf\7,\2\2\u00bf"+ + "\u00c3\3\2\2\2\u00c0\u00c2\13\2\2\2\u00c1\u00c0\3\2\2\2\u00c2\u00c5\3"+ + "\2\2\2\u00c3\u00c4\3\2\2\2\u00c3\u00c1\3\2\2\2\u00c4\u00c6\3\2\2\2\u00c5"+ + 
"\u00c3\3\2\2\2\u00c6\u00c7\7,\2\2\u00c7\u00c9\7\61\2\2\u00c8\u00b3\3\2"+ + "\2\2\u00c8\u00bd\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00cb\b\3\2\2\u00cb"+ + "\7\3\2\2\2\u00cc\u00cd\7}\2\2\u00cd\t\3\2\2\2\u00ce\u00cf\7\177\2\2\u00cf"+ + "\13\3\2\2\2\u00d0\u00d1\7]\2\2\u00d1\r\3\2\2\2\u00d2\u00d3\7_\2\2\u00d3"+ + "\17\3\2\2\2\u00d4\u00d5\7*\2\2\u00d5\21\3\2\2\2\u00d6\u00d7\7+\2\2\u00d7"+ + "\23\3\2\2\2\u00d8\u00d9\7\60\2\2\u00d9\u00da\3\2\2\2\u00da\u00db\b\n\3"+ + "\2\u00db\25\3\2\2\2\u00dc\u00dd\7A\2\2\u00dd\u00de\7\60\2\2\u00de\u00df"+ + "\3\2\2\2\u00df\u00e0\b\13\3\2\u00e0\27\3\2\2\2\u00e1\u00e2\7.\2\2\u00e2"+ + "\31\3\2\2\2\u00e3\u00e4\7=\2\2\u00e4\33\3\2\2\2\u00e5\u00e6\7k\2\2\u00e6"+ + "\u00e7\7h\2\2\u00e7\35\3\2\2\2\u00e8\u00e9\7k\2\2\u00e9\u00ea\7p\2\2\u00ea"+ + "\37\3\2\2\2\u00eb\u00ec\7g\2\2\u00ec\u00ed\7n\2\2\u00ed\u00ee\7u\2\2\u00ee"+ + "\u00ef\7g\2\2\u00ef!\3\2\2\2\u00f0\u00f1\7y\2\2\u00f1\u00f2\7j\2\2\u00f2"+ + "\u00f3\7k\2\2\u00f3\u00f4\7n\2\2\u00f4\u00f5\7g\2\2\u00f5#\3\2\2\2\u00f6"+ + "\u00f7\7f\2\2\u00f7\u00f8\7q\2\2\u00f8%\3\2\2\2\u00f9\u00fa\7h\2\2\u00fa"+ + "\u00fb\7q\2\2\u00fb\u00fc\7t\2\2\u00fc\'\3\2\2\2\u00fd\u00fe\7e\2\2\u00fe"+ + "\u00ff\7q\2\2\u00ff\u0100\7p\2\2\u0100\u0101\7v\2\2\u0101\u0102\7k\2\2"+ + "\u0102\u0103\7p\2\2\u0103\u0104\7w\2\2\u0104\u0105\7g\2\2\u0105)\3\2\2"+ + "\2\u0106\u0107\7d\2\2\u0107\u0108\7t\2\2\u0108\u0109\7g\2\2\u0109\u010a"+ + "\7c\2\2\u010a\u010b\7m\2\2\u010b+\3\2\2\2\u010c\u010d\7t\2\2\u010d\u010e"+ + "\7g\2\2\u010e\u010f\7v\2\2\u010f\u0110\7w\2\2\u0110\u0111\7t\2\2\u0111"+ + "\u0112\7p\2\2\u0112-\3\2\2\2\u0113\u0114\7p\2\2\u0114\u0115\7g\2\2\u0115"+ + "\u0116\7y\2\2\u0116/\3\2\2\2\u0117\u0118\7v\2\2\u0118\u0119\7t\2\2\u0119"+ + "\u011a\7{\2\2\u011a\61\3\2\2\2\u011b\u011c\7e\2\2\u011c\u011d\7c\2\2\u011d"+ + "\u011e\7v\2\2\u011e\u011f\7e\2\2\u011f\u0120\7j\2\2\u0120\63\3\2\2\2\u0121"+ + "\u0122\7v\2\2\u0122\u0123\7j\2\2\u0123\u0124\7t\2\2\u0124\u0125\7q\2\2"+ + 
"\u0125\u0126\7y\2\2\u0126\65\3\2\2\2\u0127\u0128\7v\2\2\u0128\u0129\7"+ + "j\2\2\u0129\u012a\7k\2\2\u012a\u012b\7u\2\2\u012b\67\3\2\2\2\u012c\u012d"+ + "\7k\2\2\u012d\u012e\7p\2\2\u012e\u012f\7u\2\2\u012f\u0130\7v\2\2\u0130"+ + "\u0131\7c\2\2\u0131\u0132\7p\2\2\u0132\u0133\7e\2\2\u0133\u0134\7g\2\2"+ + "\u0134\u0135\7q\2\2\u0135\u0136\7h\2\2\u01369\3\2\2\2\u0137\u0138\7#\2"+ + "\2\u0138;\3\2\2\2\u0139\u013a\7\u0080\2\2\u013a=\3\2\2\2\u013b\u013c\7"+ + ",\2\2\u013c?\3\2\2\2\u013d\u013e\7\61\2\2\u013e\u013f\6 \2\2\u013fA\3"+ + "\2\2\2\u0140\u0141\7\'\2\2\u0141C\3\2\2\2\u0142\u0143\7-\2\2\u0143E\3"+ + "\2\2\2\u0144\u0145\7/\2\2\u0145G\3\2\2\2\u0146\u0147\7>\2\2\u0147\u0148"+ + "\7>\2\2\u0148I\3\2\2\2\u0149\u014a\7@\2\2\u014a\u014b\7@\2\2\u014bK\3"+ + "\2\2\2\u014c\u014d\7@\2\2\u014d\u014e\7@\2\2\u014e\u014f\7@\2\2\u014f"+ + "M\3\2\2\2\u0150\u0151\7>\2\2\u0151O\3\2\2\2\u0152\u0153\7>\2\2\u0153\u0154"+ + "\7?\2\2\u0154Q\3\2\2\2\u0155\u0156\7@\2\2\u0156S\3\2\2\2\u0157\u0158\7"+ + "@\2\2\u0158\u0159\7?\2\2\u0159U\3\2\2\2\u015a\u015b\7?\2\2\u015b\u015c"+ + "\7?\2\2\u015cW\3\2\2\2\u015d\u015e\7?\2\2\u015e\u015f\7?\2\2\u015f\u0160"+ + "\7?\2\2\u0160Y\3\2\2\2\u0161\u0162\7#\2\2\u0162\u0163\7?\2\2\u0163[\3"+ + "\2\2\2\u0164\u0165\7#\2\2\u0165\u0166\7?\2\2\u0166\u0167\7?\2\2\u0167"+ + "]\3\2\2\2\u0168\u0169\7(\2\2\u0169_\3\2\2\2\u016a\u016b\7`\2\2\u016ba"+ + "\3\2\2\2\u016c\u016d\7~\2\2\u016dc\3\2\2\2\u016e\u016f\7(\2\2\u016f\u0170"+ + "\7(\2\2\u0170e\3\2\2\2\u0171\u0172\7~\2\2\u0172\u0173\7~\2\2\u0173g\3"+ + "\2\2\2\u0174\u0175\7A\2\2\u0175i\3\2\2\2\u0176\u0177\7<\2\2\u0177k\3\2"+ + "\2\2\u0178\u0179\7A\2\2\u0179\u017a\7<\2\2\u017am\3\2\2\2\u017b\u017c"+ + "\7<\2\2\u017c\u017d\7<\2\2\u017do\3\2\2\2\u017e\u017f\7/\2\2\u017f\u0180"+ + "\7@\2\2\u0180q\3\2\2\2\u0181\u0182\7?\2\2\u0182\u0183\7\u0080\2\2\u0183"+ + "s\3\2\2\2\u0184\u0185\7?\2\2\u0185\u0186\7?\2\2\u0186\u0187\7\u0080\2"+ + "\2\u0187u\3\2\2\2\u0188\u0189\7-\2\2\u0189\u018a\7-\2\2\u018aw\3\2\2\2"+ + 
"\u018b\u018c\7/\2\2\u018c\u018d\7/\2\2\u018dy\3\2\2\2\u018e\u018f\7?\2"+ + "\2\u018f{\3\2\2\2\u0190\u0191\7-\2\2\u0191\u0192\7?\2\2\u0192}\3\2\2\2"+ + "\u0193\u0194\7/\2\2\u0194\u0195\7?\2\2\u0195\177\3\2\2\2\u0196\u0197\7"+ + ",\2\2\u0197\u0198\7?\2\2\u0198\u0081\3\2\2\2\u0199\u019a\7\61\2\2\u019a"+ + "\u019b\7?\2\2\u019b\u0083\3\2\2\2\u019c\u019d\7\'\2\2\u019d\u019e\7?\2"+ + "\2\u019e\u0085\3\2\2\2\u019f\u01a0\7(\2\2\u01a0\u01a1\7?\2\2\u01a1\u0087"+ + "\3\2\2\2\u01a2\u01a3\7`\2\2\u01a3\u01a4\7?\2\2\u01a4\u0089\3\2\2\2\u01a5"+ + "\u01a6\7~\2\2\u01a6\u01a7\7?\2\2\u01a7\u008b\3\2\2\2\u01a8\u01a9\7>\2"+ + "\2\u01a9\u01aa\7>\2\2\u01aa\u01ab\7?\2\2\u01ab\u008d\3\2\2\2\u01ac\u01ad"+ + "\7@\2\2\u01ad\u01ae\7@\2\2\u01ae\u01af\7?\2\2\u01af\u008f\3\2\2\2\u01b0"+ + "\u01b1\7@\2\2\u01b1\u01b2\7@\2\2\u01b2\u01b3\7@\2\2\u01b3\u01b4\7?\2\2"+ + "\u01b4\u0091\3\2\2\2\u01b5\u01b7\7\62\2\2\u01b6\u01b8\t\4\2\2\u01b7\u01b6"+ + "\3\2\2\2\u01b8\u01b9\3\2\2\2\u01b9\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba"+ + "\u01bc\3\2\2\2\u01bb\u01bd\t\5\2\2\u01bc\u01bb\3\2\2\2\u01bc\u01bd\3\2"+ + "\2\2\u01bd\u0093\3\2\2\2\u01be\u01bf\7\62\2\2\u01bf\u01c1\t\6\2\2\u01c0"+ + "\u01c2\t\7\2\2\u01c1\u01c0\3\2\2\2\u01c2\u01c3\3\2\2\2\u01c3\u01c1\3\2"+ + "\2\2\u01c3\u01c4\3\2\2\2\u01c4\u01c6\3\2\2\2\u01c5\u01c7\t\5\2\2\u01c6"+ + "\u01c5\3\2\2\2\u01c6\u01c7\3\2\2\2\u01c7\u0095\3\2\2\2\u01c8\u01d1\7\62"+ + "\2\2\u01c9\u01cd\t\b\2\2\u01ca\u01cc\t\t\2\2\u01cb\u01ca\3\2\2\2\u01cc"+ + "\u01cf\3\2\2\2\u01cd\u01cb\3\2\2\2\u01cd\u01ce\3\2\2\2\u01ce\u01d1\3\2"+ + "\2\2\u01cf\u01cd\3\2\2\2\u01d0\u01c8\3\2\2\2\u01d0\u01c9\3\2\2\2\u01d1"+ + "\u01d3\3\2\2\2\u01d2\u01d4\t\n\2\2\u01d3\u01d2\3\2\2\2\u01d3\u01d4\3\2"+ + "\2\2\u01d4\u0097\3\2\2\2\u01d5\u01de\7\62\2\2\u01d6\u01da\t\b\2\2\u01d7"+ + "\u01d9\t\t\2\2\u01d8\u01d7\3\2\2\2\u01d9\u01dc\3\2\2\2\u01da\u01d8\3\2"+ + "\2\2\u01da\u01db\3\2\2\2\u01db\u01de\3\2\2\2\u01dc\u01da\3\2\2\2\u01dd"+ + "\u01d5\3\2\2\2\u01dd\u01d6\3\2\2\2\u01de\u01e5\3\2\2\2\u01df\u01e1\5\24"+ + 
"\n\2\u01e0\u01e2\t\t\2\2\u01e1\u01e0\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3"+ + "\u01e1\3\2\2\2\u01e3\u01e4\3\2\2\2\u01e4\u01e6\3\2\2\2\u01e5\u01df\3\2"+ + "\2\2\u01e5\u01e6\3\2\2\2\u01e6\u01f0\3\2\2\2\u01e7\u01e9\t\13\2\2\u01e8"+ + "\u01ea\t\f\2\2\u01e9\u01e8\3\2\2\2\u01e9\u01ea\3\2\2\2\u01ea\u01ec\3\2"+ + "\2\2\u01eb\u01ed\t\t\2\2\u01ec\u01eb\3\2\2\2\u01ed\u01ee\3\2\2\2\u01ee"+ + "\u01ec\3\2\2\2\u01ee\u01ef\3\2\2\2\u01ef\u01f1\3\2\2\2\u01f0\u01e7\3\2"+ + "\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f3\3\2\2\2\u01f2\u01f4\t\r\2\2\u01f3"+ + "\u01f2\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u0099\3\2\2\2\u01f5\u01fd\7$"+ + "\2\2\u01f6\u01f7\7^\2\2\u01f7\u01fc\7$\2\2\u01f8\u01f9\7^\2\2\u01f9\u01fc"+ + "\7^\2\2\u01fa\u01fc\n\16\2\2\u01fb\u01f6\3\2\2\2\u01fb\u01f8\3\2\2\2\u01fb"+ + "\u01fa\3\2\2\2\u01fc\u01ff\3\2\2\2\u01fd\u01fe\3\2\2\2\u01fd\u01fb\3\2"+ + "\2\2\u01fe\u0200\3\2\2\2\u01ff\u01fd\3\2\2\2\u0200\u020e\7$\2\2\u0201"+ + "\u0209\7)\2\2\u0202\u0203\7^\2\2\u0203\u0208\7)\2\2\u0204\u0205\7^\2\2"+ + "\u0205\u0208\7^\2\2\u0206\u0208\n\16\2\2\u0207\u0202\3\2\2\2\u0207\u0204"+ + "\3\2\2\2\u0207\u0206\3\2\2\2\u0208\u020b\3\2\2\2\u0209\u020a\3\2\2\2\u0209"+ + "\u0207\3\2\2\2\u020a\u020c\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u020e\7)"+ + "\2\2\u020d\u01f5\3\2\2\2\u020d\u0201\3\2\2\2\u020e\u009b\3\2\2\2\u020f"+ + "\u0213\7\61\2\2\u0210\u0214\n\17\2\2\u0211\u0212\7^\2\2\u0212\u0214\n"+ + "\20\2\2\u0213\u0210\3\2\2\2\u0213\u0211\3\2\2\2\u0214\u0215\3\2\2\2\u0215"+ + "\u0213\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u021b\7\61"+ + "\2\2\u0218\u021a\t\21\2\2\u0219\u0218\3\2\2\2\u021a\u021d\3\2\2\2\u021b"+ + "\u0219\3\2\2\2\u021b\u021c\3\2\2\2\u021c\u021e\3\2\2\2\u021d\u021b\3\2"+ + "\2\2\u021e\u021f\6N\3\2\u021f\u009d\3\2\2\2\u0220\u0221\7v\2\2\u0221\u0222"+ + "\7t\2\2\u0222\u0223\7w\2\2\u0223\u0224\7g\2\2\u0224\u009f\3\2\2\2\u0225"+ + "\u0226\7h\2\2\u0226\u0227\7c\2\2\u0227\u0228\7n\2\2\u0228\u0229\7u\2\2"+ + "\u0229\u022a\7g\2\2\u022a\u00a1\3\2\2\2\u022b\u022c\7p\2\2\u022c\u022d"+ + 
"\7w\2\2\u022d\u022e\7n\2\2\u022e\u022f\7n\2\2\u022f\u00a3\3\2\2\2\u0230"+ + "\u0236\5\u00a6S\2\u0231\u0232\5\24\n\2\u0232\u0233\5\u00a6S\2\u0233\u0235"+ + "\3\2\2\2\u0234\u0231\3\2\2\2\u0235\u0238\3\2\2\2\u0236\u0234\3\2\2\2\u0236"+ + "\u0237\3\2\2\2\u0237\u0239\3\2\2\2\u0238\u0236\3\2\2\2\u0239\u023a\6R"+ + "\4\2\u023a\u00a5\3\2\2\2\u023b\u023f\t\22\2\2\u023c\u023e\t\23\2\2\u023d"+ + "\u023c\3\2\2\2\u023e\u0241\3\2\2\2\u023f\u023d\3\2\2\2\u023f\u0240\3\2"+ + "\2\2\u0240\u00a7\3\2\2\2\u0241\u023f\3\2\2\2\u0242\u024b\7\62\2\2\u0243"+ + "\u0247\t\b\2\2\u0244\u0246\t\t\2\2\u0245\u0244\3\2\2\2\u0246\u0249\3\2"+ + "\2\2\u0247\u0245\3\2\2\2\u0247\u0248\3\2\2\2\u0248\u024b\3\2\2\2\u0249"+ + "\u0247\3\2\2\2\u024a\u0242\3\2\2\2\u024a\u0243\3\2\2\2\u024b\u024c\3\2"+ + "\2\2\u024c\u024d\bT\4\2\u024d\u00a9\3\2\2\2\u024e\u0252\t\22\2\2\u024f"+ + "\u0251\t\23\2\2\u0250\u024f\3\2\2\2\u0251\u0254\3\2\2\2\u0252\u0250\3"+ + "\2\2\2\u0252\u0253\3\2\2\2\u0253\u0255\3\2\2\2\u0254\u0252\3\2\2\2\u0255"+ + "\u0256\bU\4\2\u0256\u00ab\3\2\2\2$\2\3\u00af\u00b9\u00c3\u00c8\u01b9\u01bc"+ + "\u01c3\u01c6\u01cd\u01d0\u01d3\u01da\u01dd\u01e3\u01e5\u01e9\u01ee\u01f0"+ + "\u01f3\u01fb\u01fd\u0207\u0209\u020d\u0213\u0215\u021b\u0236\u023f\u0247"+ + "\u024a\u0252\5\b\2\2\4\3\2\4\2\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java index 964ef714838..619c582d04a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java @@ -18,16 +18,16 @@ class PainlessParser extends Parser { new PredictionContextCache(); public static final int WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, 
SEMICOLON=11, IF=12, IN=13, ELSE=14, WHILE=15, DO=16, FOR=17, - CONTINUE=18, BREAK=19, RETURN=20, NEW=21, TRY=22, CATCH=23, THROW=24, - THIS=25, INSTANCEOF=26, BOOLNOT=27, BWNOT=28, MUL=29, DIV=30, REM=31, - ADD=32, SUB=33, LSH=34, RSH=35, USH=36, LT=37, LTE=38, GT=39, GTE=40, - EQ=41, EQR=42, NE=43, NER=44, BWAND=45, XOR=46, BWOR=47, BOOLAND=48, BOOLOR=49, - COND=50, COLON=51, REF=52, ARROW=53, FIND=54, MATCH=55, INCR=56, DECR=57, - ASSIGN=58, AADD=59, ASUB=60, AMUL=61, ADIV=62, AREM=63, AAND=64, AXOR=65, - AOR=66, ALSH=67, ARSH=68, AUSH=69, OCTAL=70, HEX=71, INTEGER=72, DECIMAL=73, - STRING=74, REGEX=75, TRUE=76, FALSE=77, NULL=78, TYPE=79, ID=80, DOTINTEGER=81, - DOTID=82; + NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, + FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, + THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, + ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, + EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, + COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, + DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, + AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, + DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, TYPE=81, + ID=82, DOTINTEGER=83, DOTID=84; public static final int RULE_source = 0, RULE_function = 1, RULE_parameters = 2, RULE_statement = 3, RULE_trailer = 4, RULE_block = 5, RULE_empty = 6, RULE_initializer = 7, @@ -48,27 +48,27 @@ class PainlessParser extends Parser { }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", + "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", 
"'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", - "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'::'", "'->'", - "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", - "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, - null, null, null, null, "'true'", "'false'", "'null'" + "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", + "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", + "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, + null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", - "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", - "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", - "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", - "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", - "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", - "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", - "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", - "DOTID" + "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", + "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", + "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", + "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", + "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", + "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", + 
"AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", + "NULL", "TYPE", "ID", "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -171,7 +171,7 @@ class PainlessParser extends Parser { setState(73); _errHandler.sync(this); _la = _input.LA(1); - while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { { setState(70); @@ -703,7 +703,7 @@ class PainlessParser extends Parser { match(LP); setState(128); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << 
DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { setState(127); initializer(); @@ -714,7 +714,7 @@ class PainlessParser extends Parser { match(SEMICOLON); setState(132); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { 
setState(131); expression(0); @@ -725,7 +725,7 @@ class PainlessParser extends Parser { match(SEMICOLON); setState(136); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { setState(135); afterthought(); @@ -1044,7 +1044,7 @@ class PainlessParser extends Parser { setState(195); _errHandler.sync(this); _la = _input.LA(1); - while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + while ((((_la) & ~0x3f) == 0 && ((1L << _la) 
& ((1L << LBRACE) | (1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { { setState(192); @@ -1588,6 +1588,21 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } + public static class ElvisContext extends ExpressionContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode ELVIS() { return getToken(PainlessParser.ELVIS, 0); } + public ElvisContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitElvis(this); + else return visitor.visitChildren(this); + } + } public static class InstanceofContext extends ExpressionContext { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); @@ -1629,7 +1644,7 @@ class PainlessParser extends Parser { unary(); } _ctx.stop = _input.LT(-1); - setState(289); + setState(292); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1637,14 +1652,14 @@ class PainlessParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(287); + setState(290); switch ( 
getInterpreter().adaptivePredict(_input,18,_ctx) ) { case 1: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(242); - if (!(precpred(_ctx, 14))) throw new FailedPredicateException(this, "precpred(_ctx, 14)"); + if (!(precpred(_ctx, 15))) throw new FailedPredicateException(this, "precpred(_ctx, 15)"); setState(243); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { @@ -1653,7 +1668,7 @@ class PainlessParser extends Parser { consume(); } setState(244); - expression(15); + expression(16); } break; case 2: @@ -1661,7 +1676,7 @@ class PainlessParser extends Parser { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(245); - if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)"); + if (!(precpred(_ctx, 14))) throw new FailedPredicateException(this, "precpred(_ctx, 14)"); setState(246); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { @@ -1670,7 +1685,7 @@ class PainlessParser extends Parser { consume(); } setState(247); - expression(14); + expression(15); } break; case 3: @@ -1678,7 +1693,7 @@ class PainlessParser extends Parser { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(248); - if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); + if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)"); setState(249); _la = _input.LA(1); if ( !(_la==FIND || _la==MATCH) ) { @@ -1687,7 +1702,7 @@ class PainlessParser extends Parser { consume(); } setState(250); - expression(13); + expression(14); } break; case 4: @@ -1695,7 +1710,7 @@ class PainlessParser extends Parser { _localctx = new 
BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(251); - if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); + if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); setState(252); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { @@ -1704,7 +1719,7 @@ class PainlessParser extends Parser { consume(); } setState(253); - expression(12); + expression(13); } break; case 5: @@ -1712,7 +1727,7 @@ class PainlessParser extends Parser { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(254); - if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); + if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); setState(255); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { @@ -1721,7 +1736,7 @@ class PainlessParser extends Parser { consume(); } setState(256); - expression(11); + expression(12); } break; case 6: @@ -1729,7 +1744,7 @@ class PainlessParser extends Parser { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(257); - if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); + if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); setState(258); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { @@ -1738,7 +1753,7 @@ class PainlessParser extends Parser { consume(); } setState(259); - expression(9); + expression(10); } break; case 7: @@ 
-1746,11 +1761,11 @@ class PainlessParser extends Parser { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(260); - if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); + if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); setState(261); match(BWAND); setState(262); - expression(8); + expression(9); } break; case 8: @@ -1758,11 +1773,11 @@ class PainlessParser extends Parser { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(263); - if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); + if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); setState(264); match(XOR); setState(265); - expression(7); + expression(8); } break; case 9: @@ -1770,11 +1785,11 @@ class PainlessParser extends Parser { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(266); - if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); + if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); setState(267); match(BWOR); setState(268); - expression(6); + expression(7); } break; case 10: @@ -1782,11 +1797,11 @@ class PainlessParser extends Parser { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(269); - if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); + if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); setState(270); match(BOOLAND); setState(271); - expression(5); + expression(6); } break; case 11: @@ 
-1794,11 +1809,11 @@ class PainlessParser extends Parser { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(272); - if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); + if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); setState(273); match(BOOLOR); setState(274); - expression(4); + expression(5); } break; case 12: @@ -1806,7 +1821,7 @@ class PainlessParser extends Parser { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(275); - if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); setState(276); match(COND); setState(277); @@ -1814,42 +1829,54 @@ class PainlessParser extends Parser { setState(278); match(COLON); setState(279); - expression(2); + expression(3); } break; case 13: { - _localctx = new AssignmentContext(new ExpressionContext(_parentctx, _parentState)); + _localctx = new ElvisContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(281); - if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); setState(282); - _la = _input.LA(1); - if ( !(((((_la - 58)) & ~0x3f) == 0 && ((1L << (_la - 58)) & ((1L << (ASSIGN - 58)) | (1L << (AADD - 58)) | (1L << (ASUB - 58)) | (1L << (AMUL - 58)) | (1L << (ADIV - 58)) | (1L << (AREM - 58)) | (1L << (AAND - 58)) | (1L << (AXOR - 58)) | (1L << (AOR - 58)) | (1L << (ALSH - 58)) | (1L << (ARSH - 58)) | (1L << (AUSH - 58)))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + match(ELVIS); 
setState(283); - expression(1); + expression(2); } break; case 14: { - _localctx = new InstanceofContext(new ExpressionContext(_parentctx, _parentState)); + _localctx = new AssignmentContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); setState(284); - if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); setState(285); - match(INSTANCEOF); + _la = _input.LA(1); + if ( !(((((_la - 60)) & ~0x3f) == 0 && ((1L << (_la - 60)) & ((1L << (ASSIGN - 60)) | (1L << (AADD - 60)) | (1L << (ASUB - 60)) | (1L << (AMUL - 60)) | (1L << (ADIV - 60)) | (1L << (AREM - 60)) | (1L << (AAND - 60)) | (1L << (AXOR - 60)) | (1L << (AOR - 60)) | (1L << (ALSH - 60)) | (1L << (ARSH - 60)) | (1L << (AUSH - 60)))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } setState(286); + expression(1); + } + break; + case 15: + { + _localctx = new InstanceofContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(287); + if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); + setState(288); + match(INSTANCEOF); + setState(289); decltype(); } break; } } } - setState(291); + setState(294); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1951,20 +1978,20 @@ class PainlessParser extends Parser { enterRule(_localctx, 30, RULE_unary); int _la; try { - setState(305); + setState(308); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: _localctx = new PreContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(292); + setState(295); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(293); + setState(296); chain(); } break; @@ -1972,9 +1999,9 @@ 
class PainlessParser extends Parser { _localctx = new PostContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(294); + setState(297); chain(); - setState(295); + setState(298); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); @@ -1987,7 +2014,7 @@ class PainlessParser extends Parser { _localctx = new ReadContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(297); + setState(300); chain(); } break; @@ -1995,14 +2022,14 @@ class PainlessParser extends Parser { _localctx = new OperatorContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(298); + setState(301); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(299); + setState(302); unary(); } break; @@ -2010,13 +2037,13 @@ class PainlessParser extends Parser { _localctx = new CastContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(300); - match(LP); - setState(301); - decltype(); - setState(302); - match(RP); setState(303); + match(LP); + setState(304); + decltype(); + setState(305); + match(RP); + setState(306); unary(); } break; @@ -2098,27 +2125,27 @@ class PainlessParser extends Parser { enterRule(_localctx, 32, RULE_chain); try { int _alt; - setState(323); + setState(326); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: _localctx = new DynamicContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(307); + setState(310); primary(); - setState(311); + setState(314); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(308); + setState(311); postfix(); } } } - setState(313); + setState(316); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } @@ -2128,23 +2155,23 @@ class PainlessParser extends 
Parser { _localctx = new StaticContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(314); + setState(317); decltype(); - setState(315); + setState(318); postdot(); - setState(319); + setState(322); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(316); + setState(319); postfix(); } } } - setState(321); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } @@ -2154,7 +2181,7 @@ class PainlessParser extends Parser { _localctx = new NewarrayContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(322); + setState(325); arrayinitializer(); } break; @@ -2314,17 +2341,17 @@ class PainlessParser extends Parser { enterRule(_localctx, 34, RULE_primary); int _la; try { - setState(343); + setState(346); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: _localctx = new PrecedenceContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(325); + setState(328); match(LP); - setState(326); + setState(329); expression(0); - setState(327); + setState(330); match(RP); } break; @@ -2332,9 +2359,9 @@ class PainlessParser extends Parser { _localctx = new NumericContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(329); + setState(332); _la = _input.LA(1); - if ( !(((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)))) != 0)) ) { + if ( !(((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -2345,7 +2372,7 @@ class PainlessParser extends Parser { _localctx = new TrueContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(330); + setState(333); match(TRUE); } break; @@ -2353,7 +2380,7 @@ class 
PainlessParser extends Parser { _localctx = new FalseContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(331); + setState(334); match(FALSE); } break; @@ -2361,7 +2388,7 @@ class PainlessParser extends Parser { _localctx = new NullContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(332); + setState(335); match(NULL); } break; @@ -2369,7 +2396,7 @@ class PainlessParser extends Parser { _localctx = new StringContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(333); + setState(336); match(STRING); } break; @@ -2377,7 +2404,7 @@ class PainlessParser extends Parser { _localctx = new RegexContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(334); + setState(337); match(REGEX); } break; @@ -2385,7 +2412,7 @@ class PainlessParser extends Parser { _localctx = new ListinitContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(335); + setState(338); listinitializer(); } break; @@ -2393,7 +2420,7 @@ class PainlessParser extends Parser { _localctx = new MapinitContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(336); + setState(339); mapinitializer(); } break; @@ -2401,7 +2428,7 @@ class PainlessParser extends Parser { _localctx = new VariableContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(337); + setState(340); match(ID); } break; @@ -2409,9 +2436,9 @@ class PainlessParser extends Parser { _localctx = new CalllocalContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(338); + setState(341); match(ID); - setState(339); + setState(342); arguments(); } break; @@ -2419,11 +2446,11 @@ class PainlessParser extends Parser { _localctx = new NewobjectContext(_localctx); enterOuterAlt(_localctx, 12); { - setState(340); + setState(343); match(NEW); - setState(341); + setState(344); match(TYPE); - setState(342); + setState(345); arguments(); } break; @@ -2465,26 +2492,26 @@ class PainlessParser extends Parser { PostfixContext _localctx = new PostfixContext(_ctx, getState()); enterRule(_localctx, 36, 
RULE_postfix); try { - setState(348); + setState(351); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(345); + setState(348); callinvoke(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(346); + setState(349); fieldaccess(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(347); + setState(350); braceaccess(); } break; @@ -2523,19 +2550,19 @@ class PainlessParser extends Parser { PostdotContext _localctx = new PostdotContext(_ctx, getState()); enterRule(_localctx, 38, RULE_postdot); try { - setState(352); + setState(355); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(350); + setState(353); callinvoke(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(351); + setState(354); fieldaccess(); } break; @@ -2553,12 +2580,12 @@ class PainlessParser extends Parser { } public static class CallinvokeContext extends ParserRuleContext { - public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } public ArgumentsContext arguments() { return getRuleContext(ArgumentsContext.class,0); } - public TerminalNode COND() { return getToken(PainlessParser.COND, 0); } + public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } + public TerminalNode NSDOT() { return getToken(PainlessParser.NSDOT, 0); } public CallinvokeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -2577,17 +2604,13 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(355); - _la = _input.LA(1); - if (_la==COND) { - { - setState(354); - match(COND); - } - } - setState(357); - match(DOT); + _la = _input.LA(1); + if ( !(_la==DOT || _la==NSDOT) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } setState(358); match(DOTID); setState(359); @@ -2607,9 +2630,9 @@ class 
PainlessParser extends Parser { public static class FieldaccessContext extends ParserRuleContext { public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } + public TerminalNode NSDOT() { return getToken(PainlessParser.NSDOT, 0); } public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } public TerminalNode DOTINTEGER() { return getToken(PainlessParser.DOTINTEGER, 0); } - public TerminalNode COND() { return getToken(PainlessParser.COND, 0); } public FieldaccessContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -2628,18 +2651,14 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(362); + setState(361); _la = _input.LA(1); - if (_la==COND) { - { - setState(361); - match(COND); - } + if ( !(_la==DOT || _la==NSDOT) ) { + _errHandler.recoverInline(this); + } else { + consume(); } - - setState(364); - match(DOT); - setState(365); + setState(362); _la = _input.LA(1); if ( !(_la==DOTINTEGER || _la==DOTID) ) { _errHandler.recoverInline(this); @@ -2682,11 +2701,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(367); + setState(364); match(LBRACE); - setState(368); + setState(365); expression(0); - setState(369); + setState(366); match(RBRACE); } } @@ -2783,17 +2802,17 @@ class PainlessParser extends Parser { int _la; try { int _alt; - setState(415); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + setState(412); + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: _localctx = new NewstandardarrayContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(371); + setState(368); match(NEW); - setState(372); + setState(369); match(TYPE); - setState(377); + setState(374); _errHandler.sync(this); _alt = 1; do { @@ -2801,11 +2820,11 @@ class PainlessParser extends Parser { case 1: { { - setState(373); + setState(370); match(LBRACE); - setState(374); + setState(371); expression(0); - setState(375); 
+ setState(372); match(RBRACE); } } @@ -2813,31 +2832,31 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(379); + setState(376); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(388); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + setState(385); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(381); + setState(378); postdot(); - setState(385); + setState(382); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,30,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(382); + setState(379); postfix(); } } } - setState(387); + setState(384); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,30,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } } break; @@ -2848,67 +2867,67 @@ class PainlessParser extends Parser { _localctx = new NewinitializedarrayContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(390); + setState(387); match(NEW); - setState(391); + setState(388); match(TYPE); - setState(392); + setState(389); match(LBRACE); - setState(393); + setState(390); match(RBRACE); - setState(394); + setState(391); match(LBRACK); - setState(403); + setState(400); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 
70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { - setState(395); + setState(392); expression(0); - setState(400); + setState(397); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(396); + setState(393); match(COMMA); - setState(397); + setState(394); expression(0); } } - setState(402); + setState(399); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(406); + setState(403); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(405); + setState(402); match(SEMICOLON); } } - setState(408); + setState(405); match(RBRACK); - setState(412); + setState(409); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(409); + setState(406); postfix(); } } } - setState(414); + setState(411); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } } break; @@ -2954,41 +2973,41 @@ class PainlessParser extends Parser { enterRule(_localctx, 48, RULE_listinitializer); int _la; try { - setState(430); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + setState(427); + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(417); + setState(414); match(LBRACE); - 
setState(418); + setState(415); expression(0); - setState(423); + setState(420); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(419); + setState(416); match(COMMA); - setState(420); + setState(417); expression(0); } } - setState(425); + setState(422); _errHandler.sync(this); _la = _input.LA(1); } - setState(426); + setState(423); match(RBRACE); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(428); + setState(425); match(LBRACE); - setState(429); + setState(426); match(RBRACE); } break; @@ -3035,43 +3054,43 @@ class PainlessParser extends Parser { enterRule(_localctx, 50, RULE_mapinitializer); int _la; try { - setState(446); - switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { + setState(443); + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(432); + setState(429); match(LBRACE); - setState(433); + setState(430); maptoken(); - setState(438); + setState(435); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(434); + setState(431); match(COMMA); - setState(435); + setState(432); maptoken(); } } - setState(440); + setState(437); _errHandler.sync(this); _la = _input.LA(1); } - setState(441); + setState(438); match(RBRACE); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(443); + setState(440); match(LBRACE); - setState(444); + setState(441); match(COLON); - setState(445); + setState(442); match(RBRACE); } break; @@ -3113,11 +3132,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(448); + setState(445); expression(0); - setState(449); + setState(446); match(COLON); - setState(450); + setState(447); expression(0); } } @@ -3164,34 +3183,34 @@ class PainlessParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(452); + setState(449); match(LP); - setState(461); + setState(458); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L 
<< LP) | (1L << NEW) | (1L << THIS) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)) | (1L << (STRING - 70)) | (1L << (REGEX - 70)) | (1L << (TRUE - 70)) | (1L << (FALSE - 70)) | (1L << (NULL - 70)) | (1L << (TYPE - 70)) | (1L << (ID - 70)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LBRACE) | (1L << LP) | (1L << NEW) | (1L << THIS) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (OCTAL - 72)) | (1L << (HEX - 72)) | (1L << (INTEGER - 72)) | (1L << (DECIMAL - 72)) | (1L << (STRING - 72)) | (1L << (REGEX - 72)) | (1L << (TRUE - 72)) | (1L << (FALSE - 72)) | (1L << (NULL - 72)) | (1L << (TYPE - 72)) | (1L << (ID - 72)))) != 0)) { { - setState(453); + setState(450); argument(); - setState(458); + setState(455); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(454); + setState(451); match(COMMA); - setState(455); + setState(452); argument(); } } - setState(460); + setState(457); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(463); + setState(460); match(RP); } } @@ -3232,26 +3251,26 @@ class PainlessParser extends Parser { ArgumentContext _localctx = new ArgumentContext(_ctx, getState()); enterRule(_localctx, 56, RULE_argument); try { - setState(468); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + setState(465); + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(465); + setState(462); expression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(466); + setState(463); lambda(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(467); + setState(464); funcref(); } break; @@ -3306,58 +3325,58 @@ 
class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(483); + setState(480); switch (_input.LA(1)) { case TYPE: case ID: { - setState(470); + setState(467); lamtype(); } break; case LP: { - setState(471); + setState(468); match(LP); - setState(480); + setState(477); _la = _input.LA(1); if (_la==TYPE || _la==ID) { { - setState(472); + setState(469); lamtype(); - setState(477); + setState(474); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(473); + setState(470); match(COMMA); - setState(474); + setState(471); lamtype(); } } - setState(479); + setState(476); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(482); + setState(479); match(RP); } break; default: throw new NoViableAltException(this); } - setState(485); + setState(482); match(ARROW); - setState(488); + setState(485); switch (_input.LA(1)) { case LBRACK: { - setState(486); + setState(483); block(); } break; @@ -3382,7 +3401,7 @@ class PainlessParser extends Parser { case TYPE: case ID: { - setState(487); + setState(484); expression(0); } break; @@ -3425,16 +3444,16 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(491); + setState(488); _la = _input.LA(1); if (_la==TYPE) { { - setState(490); + setState(487); decltype(); } } - setState(493); + setState(490); match(ID); } } @@ -3513,17 +3532,17 @@ class PainlessParser extends Parser { FuncrefContext _localctx = new FuncrefContext(_ctx, getState()); enterRule(_localctx, 62, RULE_funcref); try { - setState(508); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + setState(505); + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: _localctx = new ClassfuncrefContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(495); + setState(492); match(TYPE); - setState(496); + setState(493); match(REF); - setState(497); + setState(494); match(ID); } break; @@ -3531,11 +3550,11 @@ class PainlessParser extends Parser { 
_localctx = new ConstructorfuncrefContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(498); + setState(495); decltype(); - setState(499); + setState(496); match(REF); - setState(500); + setState(497); match(NEW); } break; @@ -3543,11 +3562,11 @@ class PainlessParser extends Parser { _localctx = new CapturingfuncrefContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(502); + setState(499); match(ID); - setState(503); + setState(500); match(REF); - setState(504); + setState(501); match(ID); } break; @@ -3555,11 +3574,11 @@ class PainlessParser extends Parser { _localctx = new LocalfuncrefContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(505); + setState(502); match(THIS); - setState(506); + setState(503); match(REF); - setState(507); + setState(504); match(ID); } break; @@ -3595,39 +3614,41 @@ class PainlessParser extends Parser { private boolean expression_sempred(ExpressionContext _localctx, int predIndex) { switch (predIndex) { case 1: - return precpred(_ctx, 14); + return precpred(_ctx, 15); case 2: - return precpred(_ctx, 13); + return precpred(_ctx, 14); case 3: - return precpred(_ctx, 12); + return precpred(_ctx, 13); case 4: - return precpred(_ctx, 11); + return precpred(_ctx, 12); case 5: - return precpred(_ctx, 10); + return precpred(_ctx, 11); case 6: - return precpred(_ctx, 8); - case 7: - return precpred(_ctx, 7); - case 8: - return precpred(_ctx, 6); - case 9: - return precpred(_ctx, 5); - case 10: - return precpred(_ctx, 4); - case 11: - return precpred(_ctx, 3); - case 12: - return precpred(_ctx, 2); - case 13: - return precpred(_ctx, 1); - case 14: return precpred(_ctx, 9); + case 7: + return precpred(_ctx, 8); + case 8: + return precpred(_ctx, 7); + case 9: + return precpred(_ctx, 6); + case 10: + return precpred(_ctx, 5); + case 11: + return precpred(_ctx, 4); + case 12: + return precpred(_ctx, 3); + case 13: + return precpred(_ctx, 2); + case 14: + return precpred(_ctx, 1); + case 15: + return precpred(_ctx, 10); } 
return true; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3T\u0201\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3V\u01fe\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -3647,182 +3668,181 @@ class PainlessParser extends Parser { "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ - "\3\20\3\20\3\20\3\20\3\20\3\20\7\20\u0122\n\20\f\20\16\20\u0125\13\20"+ - "\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\5\21"+ - "\u0134\n\21\3\22\3\22\7\22\u0138\n\22\f\22\16\22\u013b\13\22\3\22\3\22"+ - "\3\22\7\22\u0140\n\22\f\22\16\22\u0143\13\22\3\22\5\22\u0146\n\22\3\23"+ - "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+ - "\3\23\3\23\3\23\5\23\u015a\n\23\3\24\3\24\3\24\5\24\u015f\n\24\3\25\3"+ - "\25\5\25\u0163\n\25\3\26\5\26\u0166\n\26\3\26\3\26\3\26\3\26\3\27\5\27"+ - "\u016d\n\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31"+ - "\3\31\6\31\u017c\n\31\r\31\16\31\u017d\3\31\3\31\7\31\u0182\n\31\f\31"+ - "\16\31\u0185\13\31\5\31\u0187\n\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31"+ - "\3\31\7\31\u0191\n\31\f\31\16\31\u0194\13\31\5\31\u0196\n\31\3\31\5\31"+ - "\u0199\n\31\3\31\3\31\7\31\u019d\n\31\f\31\16\31\u01a0\13\31\5\31\u01a2"+ - "\n\31\3\32\3\32\3\32\3\32\7\32\u01a8\n\32\f\32\16\32\u01ab\13\32\3\32"+ - "\3\32\3\32\3\32\5\32\u01b1\n\32\3\33\3\33\3\33\3\33\7\33\u01b7\n\33\f"+ - "\33\16\33\u01ba\13\33\3\33\3\33\3\33\3\33\3\33\5\33\u01c1\n\33\3\34\3"+ - "\34\3\34\3\34\3\35\3\35\3\35\3\35\7\35\u01cb\n\35\f\35\16\35\u01ce\13"+ - 
"\35\5\35\u01d0\n\35\3\35\3\35\3\36\3\36\3\36\5\36\u01d7\n\36\3\37\3\37"+ - "\3\37\3\37\3\37\7\37\u01de\n\37\f\37\16\37\u01e1\13\37\5\37\u01e3\n\37"+ - "\3\37\5\37\u01e6\n\37\3\37\3\37\3\37\5\37\u01eb\n\37\3 \5 \u01ee\n \3"+ - " \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\5!\u01ff\n!\3!\2\3\36\"\2"+ - "\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@\2\16\3"+ - "\3\r\r\3\2\37!\3\2\"#\3\289\3\2$&\3\2\'*\3\2+.\3\2\u01ed"+ - "\3\2\2\2@\u01fe\3\2\2\2BD\5\4\3\2CB\3\2\2\2DG\3\2\2\2EC\3\2\2\2EF\3\2"+ - "\2\2FK\3\2\2\2GE\3\2\2\2HJ\5\b\5\2IH\3\2\2\2JM\3\2\2\2KI\3\2\2\2KL\3\2"+ - "\2\2LN\3\2\2\2MK\3\2\2\2NO\7\2\2\3O\3\3\2\2\2PQ\5\26\f\2QR\7R\2\2RS\5"+ - "\6\4\2ST\5\f\7\2T\5\3\2\2\2Ua\7\t\2\2VW\5\26\f\2W^\7R\2\2XY\7\f\2\2YZ"+ - "\5\26\f\2Z[\7R\2\2[]\3\2\2\2\\X\3\2\2\2]`\3\2\2\2^\\\3\2\2\2^_\3\2\2\2"+ - "_b\3\2\2\2`^\3\2\2\2aV\3\2\2\2ab\3\2\2\2bc\3\2\2\2cd\7\n\2\2d\7\3\2\2"+ - "\2ef\7\16\2\2fg\7\t\2\2gh\5\36\20\2hi\7\n\2\2im\5\n\6\2jk\7\20\2\2kn\5"+ - "\n\6\2ln\6\5\2\2mj\3\2\2\2ml\3\2\2\2n\u00bc\3\2\2\2op\7\21\2\2pq\7\t\2"+ - "\2qr\5\36\20\2ru\7\n\2\2sv\5\n\6\2tv\5\16\b\2us\3\2\2\2ut\3\2\2\2v\u00bc"+ - "\3\2\2\2wx\7\22\2\2xy\5\f\7\2yz\7\21\2\2z{\7\t\2\2{|\5\36\20\2|}\7\n\2"+ - "\2}~\5\34\17\2~\u00bc\3\2\2\2\177\u0080\7\23\2\2\u0080\u0082\7\t\2\2\u0081"+ - "\u0083\5\20\t\2\u0082\u0081\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0084\3"+ - "\2\2\2\u0084\u0086\7\r\2\2\u0085\u0087\5\36\20\2\u0086\u0085\3\2\2\2\u0086"+ - "\u0087\3\2\2\2\u0087\u0088\3\2\2\2\u0088\u008a\7\r\2\2\u0089\u008b\5\22"+ - "\n\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c"+ - "\u008f\7\n\2\2\u008d\u0090\5\n\6\2\u008e\u0090\5\16\b\2\u008f\u008d\3"+ - "\2\2\2\u008f\u008e\3\2\2\2\u0090\u00bc\3\2\2\2\u0091\u0092\7\23\2\2\u0092"+ - "\u0093\7\t\2\2\u0093\u0094\5\26\f\2\u0094\u0095\7R\2\2\u0095\u0096\7\65"+ - "\2\2\u0096\u0097\5\36\20\2\u0097\u0098\7\n\2\2\u0098\u0099\5\n\6\2\u0099"+ - "\u00bc\3\2\2\2\u009a\u009b\7\23\2\2\u009b\u009c\7\t\2\2\u009c\u009d\7"+ - 
"R\2\2\u009d\u009e\7\17\2\2\u009e\u009f\5\36\20\2\u009f\u00a0\7\n\2\2\u00a0"+ - "\u00a1\5\n\6\2\u00a1\u00bc\3\2\2\2\u00a2\u00a3\5\24\13\2\u00a3\u00a4\5"+ - "\34\17\2\u00a4\u00bc\3\2\2\2\u00a5\u00a6\7\24\2\2\u00a6\u00bc\5\34\17"+ - "\2\u00a7\u00a8\7\25\2\2\u00a8\u00bc\5\34\17\2\u00a9\u00aa\7\26\2\2\u00aa"+ - "\u00ab\5\36\20\2\u00ab\u00ac\5\34\17\2\u00ac\u00bc\3\2\2\2\u00ad\u00ae"+ - "\7\30\2\2\u00ae\u00b0\5\f\7\2\u00af\u00b1\5\32\16\2\u00b0\u00af\3\2\2"+ - "\2\u00b1\u00b2\3\2\2\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00bc"+ - "\3\2\2\2\u00b4\u00b5\7\32\2\2\u00b5\u00b6\5\36\20\2\u00b6\u00b7\5\34\17"+ - "\2\u00b7\u00bc\3\2\2\2\u00b8\u00b9\5\36\20\2\u00b9\u00ba\5\34\17\2\u00ba"+ - "\u00bc\3\2\2\2\u00bbe\3\2\2\2\u00bbo\3\2\2\2\u00bbw\3\2\2\2\u00bb\177"+ - "\3\2\2\2\u00bb\u0091\3\2\2\2\u00bb\u009a\3\2\2\2\u00bb\u00a2\3\2\2\2\u00bb"+ - "\u00a5\3\2\2\2\u00bb\u00a7\3\2\2\2\u00bb\u00a9\3\2\2\2\u00bb\u00ad\3\2"+ - "\2\2\u00bb\u00b4\3\2\2\2\u00bb\u00b8\3\2\2\2\u00bc\t\3\2\2\2\u00bd\u00c0"+ - "\5\f\7\2\u00be\u00c0\5\b\5\2\u00bf\u00bd\3\2\2\2\u00bf\u00be\3\2\2\2\u00c0"+ - "\13\3\2\2\2\u00c1\u00c5\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2"+ - "\2\u00c4\u00c7\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6\u00c8"+ - "\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\7\6\2\2\u00c9\r\3\2\2\2\u00ca"+ - "\u00cb\7\r\2\2\u00cb\17\3\2\2\2\u00cc\u00cf\5\24\13\2\u00cd\u00cf\5\36"+ - "\20\2\u00ce\u00cc\3\2\2\2\u00ce\u00cd\3\2\2\2\u00cf\21\3\2\2\2\u00d0\u00d1"+ - "\5\36\20\2\u00d1\23\3\2\2\2\u00d2\u00d3\5\26\f\2\u00d3\u00d8\5\30\r\2"+ - "\u00d4\u00d5\7\f\2\2\u00d5\u00d7\5\30\r\2\u00d6\u00d4\3\2\2\2\u00d7\u00da"+ - "\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\25\3\2\2\2\u00da"+ - "\u00d8\3\2\2\2\u00db\u00e0\7Q\2\2\u00dc\u00dd\7\7\2\2\u00dd\u00df\7\b"+ - "\2\2\u00de\u00dc\3\2\2\2\u00df\u00e2\3\2\2\2\u00e0\u00de\3\2\2\2\u00e0"+ - "\u00e1\3\2\2\2\u00e1\27\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e6\7R\2\2"+ - 
"\u00e4\u00e5\7<\2\2\u00e5\u00e7\5\36\20\2\u00e6\u00e4\3\2\2\2\u00e6\u00e7"+ - "\3\2\2\2\u00e7\31\3\2\2\2\u00e8\u00e9\7\31\2\2\u00e9\u00ea\7\t\2\2\u00ea"+ - "\u00eb\7Q\2\2\u00eb\u00ec\7R\2\2\u00ec\u00ed\7\n\2\2\u00ed\u00ee\5\f\7"+ - "\2\u00ee\33\3\2\2\2\u00ef\u00f0\t\2\2\2\u00f0\35\3\2\2\2\u00f1\u00f2\b"+ - "\20\1\2\u00f2\u00f3\5 \21\2\u00f3\u0123\3\2\2\2\u00f4\u00f5\f\20\2\2\u00f5"+ - "\u00f6\t\3\2\2\u00f6\u0122\5\36\20\21\u00f7\u00f8\f\17\2\2\u00f8\u00f9"+ - "\t\4\2\2\u00f9\u0122\5\36\20\20\u00fa\u00fb\f\16\2\2\u00fb\u00fc\t\5\2"+ - "\2\u00fc\u0122\5\36\20\17\u00fd\u00fe\f\r\2\2\u00fe\u00ff\t\6\2\2\u00ff"+ - "\u0122\5\36\20\16\u0100\u0101\f\f\2\2\u0101\u0102\t\7\2\2\u0102\u0122"+ - "\5\36\20\r\u0103\u0104\f\n\2\2\u0104\u0105\t\b\2\2\u0105\u0122\5\36\20"+ - "\13\u0106\u0107\f\t\2\2\u0107\u0108\7/\2\2\u0108\u0122\5\36\20\n\u0109"+ - "\u010a\f\b\2\2\u010a\u010b\7\60\2\2\u010b\u0122\5\36\20\t\u010c\u010d"+ - "\f\7\2\2\u010d\u010e\7\61\2\2\u010e\u0122\5\36\20\b\u010f\u0110\f\6\2"+ - "\2\u0110\u0111\7\62\2\2\u0111\u0122\5\36\20\7\u0112\u0113\f\5\2\2\u0113"+ - "\u0114\7\63\2\2\u0114\u0122\5\36\20\6\u0115\u0116\f\4\2\2\u0116\u0117"+ - "\7\64\2\2\u0117\u0118\5\36\20\2\u0118\u0119\7\65\2\2\u0119\u011a\5\36"+ - "\20\4\u011a\u0122\3\2\2\2\u011b\u011c\f\3\2\2\u011c\u011d\t\t\2\2\u011d"+ - "\u0122\5\36\20\3\u011e\u011f\f\13\2\2\u011f\u0120\7\34\2\2\u0120\u0122"+ - "\5\26\f\2\u0121\u00f4\3\2\2\2\u0121\u00f7\3\2\2\2\u0121\u00fa\3\2\2\2"+ - "\u0121\u00fd\3\2\2\2\u0121\u0100\3\2\2\2\u0121\u0103\3\2\2\2\u0121\u0106"+ - "\3\2\2\2\u0121\u0109\3\2\2\2\u0121\u010c\3\2\2\2\u0121\u010f\3\2\2\2\u0121"+ - "\u0112\3\2\2\2\u0121\u0115\3\2\2\2\u0121\u011b\3\2\2\2\u0121\u011e\3\2"+ - "\2\2\u0122\u0125\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124"+ - "\37\3\2\2\2\u0125\u0123\3\2\2\2\u0126\u0127\t\n\2\2\u0127\u0134\5\"\22"+ - "\2\u0128\u0129\5\"\22\2\u0129\u012a\t\n\2\2\u012a\u0134\3\2\2\2\u012b"+ - "\u0134\5\"\22\2\u012c\u012d\t\13\2\2\u012d\u0134\5 \21\2\u012e\u012f\7"+ - 
"\t\2\2\u012f\u0130\5\26\f\2\u0130\u0131\7\n\2\2\u0131\u0132\5 \21\2\u0132"+ - "\u0134\3\2\2\2\u0133\u0126\3\2\2\2\u0133\u0128\3\2\2\2\u0133\u012b\3\2"+ - "\2\2\u0133\u012c\3\2\2\2\u0133\u012e\3\2\2\2\u0134!\3\2\2\2\u0135\u0139"+ - "\5$\23\2\u0136\u0138\5&\24\2\u0137\u0136\3\2\2\2\u0138\u013b\3\2\2\2\u0139"+ - "\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u0146\3\2\2\2\u013b\u0139\3\2"+ - "\2\2\u013c\u013d\5\26\f\2\u013d\u0141\5(\25\2\u013e\u0140\5&\24\2\u013f"+ - "\u013e\3\2\2\2\u0140\u0143\3\2\2\2\u0141\u013f\3\2\2\2\u0141\u0142\3\2"+ - "\2\2\u0142\u0146\3\2\2\2\u0143\u0141\3\2\2\2\u0144\u0146\5\60\31\2\u0145"+ - "\u0135\3\2\2\2\u0145\u013c\3\2\2\2\u0145\u0144\3\2\2\2\u0146#\3\2\2\2"+ - "\u0147\u0148\7\t\2\2\u0148\u0149\5\36\20\2\u0149\u014a\7\n\2\2\u014a\u015a"+ - "\3\2\2\2\u014b\u015a\t\f\2\2\u014c\u015a\7N\2\2\u014d\u015a\7O\2\2\u014e"+ - "\u015a\7P\2\2\u014f\u015a\7L\2\2\u0150\u015a\7M\2\2\u0151\u015a\5\62\32"+ - "\2\u0152\u015a\5\64\33\2\u0153\u015a\7R\2\2\u0154\u0155\7R\2\2\u0155\u015a"+ - "\58\35\2\u0156\u0157\7\27\2\2\u0157\u0158\7Q\2\2\u0158\u015a\58\35\2\u0159"+ - "\u0147\3\2\2\2\u0159\u014b\3\2\2\2\u0159\u014c\3\2\2\2\u0159\u014d\3\2"+ - "\2\2\u0159\u014e\3\2\2\2\u0159\u014f\3\2\2\2\u0159\u0150\3\2\2\2\u0159"+ - "\u0151\3\2\2\2\u0159\u0152\3\2\2\2\u0159\u0153\3\2\2\2\u0159\u0154\3\2"+ - "\2\2\u0159\u0156\3\2\2\2\u015a%\3\2\2\2\u015b\u015f\5*\26\2\u015c\u015f"+ - "\5,\27\2\u015d\u015f\5.\30\2\u015e\u015b\3\2\2\2\u015e\u015c\3\2\2\2\u015e"+ - "\u015d\3\2\2\2\u015f\'\3\2\2\2\u0160\u0163\5*\26\2\u0161\u0163\5,\27\2"+ - "\u0162\u0160\3\2\2\2\u0162\u0161\3\2\2\2\u0163)\3\2\2\2\u0164\u0166\7"+ - "\64\2\2\u0165\u0164\3\2\2\2\u0165\u0166\3\2\2\2\u0166\u0167\3\2\2\2\u0167"+ - "\u0168\7\13\2\2\u0168\u0169\7T\2\2\u0169\u016a\58\35\2\u016a+\3\2\2\2"+ - "\u016b\u016d\7\64\2\2\u016c\u016b\3\2\2\2\u016c\u016d\3\2\2\2\u016d\u016e"+ - "\3\2\2\2\u016e\u016f\7\13\2\2\u016f\u0170\t\r\2\2\u0170-\3\2\2\2\u0171"+ - 
"\u0172\7\7\2\2\u0172\u0173\5\36\20\2\u0173\u0174\7\b\2\2\u0174/\3\2\2"+ - "\2\u0175\u0176\7\27\2\2\u0176\u017b\7Q\2\2\u0177\u0178\7\7\2\2\u0178\u0179"+ - "\5\36\20\2\u0179\u017a\7\b\2\2\u017a\u017c\3\2\2\2\u017b\u0177\3\2\2\2"+ - "\u017c\u017d\3\2\2\2\u017d\u017b\3\2\2\2\u017d\u017e\3\2\2\2\u017e\u0186"+ - "\3\2\2\2\u017f\u0183\5(\25\2\u0180\u0182\5&\24\2\u0181\u0180\3\2\2\2\u0182"+ - "\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183\u0184\3\2\2\2\u0184\u0187\3\2"+ - "\2\2\u0185\u0183\3\2\2\2\u0186\u017f\3\2\2\2\u0186\u0187\3\2\2\2\u0187"+ - "\u01a2\3\2\2\2\u0188\u0189\7\27\2\2\u0189\u018a\7Q\2\2\u018a\u018b\7\7"+ - "\2\2\u018b\u018c\7\b\2\2\u018c\u0195\7\5\2\2\u018d\u0192\5\36\20\2\u018e"+ - "\u018f\7\f\2\2\u018f\u0191\5\36\20\2\u0190\u018e\3\2\2\2\u0191\u0194\3"+ - "\2\2\2\u0192\u0190\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0196\3\2\2\2\u0194"+ - "\u0192\3\2\2\2\u0195\u018d\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0198\3\2"+ - "\2\2\u0197\u0199\7\r\2\2\u0198\u0197\3\2\2\2\u0198\u0199\3\2\2\2\u0199"+ - "\u019a\3\2\2\2\u019a\u019e\7\6\2\2\u019b\u019d\5&\24\2\u019c\u019b\3\2"+ - "\2\2\u019d\u01a0\3\2\2\2\u019e\u019c\3\2\2\2\u019e\u019f\3\2\2\2\u019f"+ - "\u01a2\3\2\2\2\u01a0\u019e\3\2\2\2\u01a1\u0175\3\2\2\2\u01a1\u0188\3\2"+ - "\2\2\u01a2\61\3\2\2\2\u01a3\u01a4\7\7\2\2\u01a4\u01a9\5\36\20\2\u01a5"+ - "\u01a6\7\f\2\2\u01a6\u01a8\5\36\20\2\u01a7\u01a5\3\2\2\2\u01a8\u01ab\3"+ - "\2\2\2\u01a9\u01a7\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa\u01ac\3\2\2\2\u01ab"+ - "\u01a9\3\2\2\2\u01ac\u01ad\7\b\2\2\u01ad\u01b1\3\2\2\2\u01ae\u01af\7\7"+ - "\2\2\u01af\u01b1\7\b\2\2\u01b0\u01a3\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b1"+ - "\63\3\2\2\2\u01b2\u01b3\7\7\2\2\u01b3\u01b8\5\66\34\2\u01b4\u01b5\7\f"+ - "\2\2\u01b5\u01b7\5\66\34\2\u01b6\u01b4\3\2\2\2\u01b7\u01ba\3\2\2\2\u01b8"+ - "\u01b6\3\2\2\2\u01b8\u01b9\3\2\2\2\u01b9\u01bb\3\2\2\2\u01ba\u01b8\3\2"+ - "\2\2\u01bb\u01bc\7\b\2\2\u01bc\u01c1\3\2\2\2\u01bd\u01be\7\7\2\2\u01be"+ - "\u01bf\7\65\2\2\u01bf\u01c1\7\b\2\2\u01c0\u01b2\3\2\2\2\u01c0\u01bd\3"+ - 
"\2\2\2\u01c1\65\3\2\2\2\u01c2\u01c3\5\36\20\2\u01c3\u01c4\7\65\2\2\u01c4"+ - "\u01c5\5\36\20\2\u01c5\67\3\2\2\2\u01c6\u01cf\7\t\2\2\u01c7\u01cc\5:\36"+ - "\2\u01c8\u01c9\7\f\2\2\u01c9\u01cb\5:\36\2\u01ca\u01c8\3\2\2\2\u01cb\u01ce"+ - "\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01d0\3\2\2\2\u01ce"+ - "\u01cc\3\2\2\2\u01cf\u01c7\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01d1\3\2"+ - "\2\2\u01d1\u01d2\7\n\2\2\u01d29\3\2\2\2\u01d3\u01d7\5\36\20\2\u01d4\u01d7"+ - "\5<\37\2\u01d5\u01d7\5@!\2\u01d6\u01d3\3\2\2\2\u01d6\u01d4\3\2\2\2\u01d6"+ - "\u01d5\3\2\2\2\u01d7;\3\2\2\2\u01d8\u01e6\5> \2\u01d9\u01e2\7\t\2\2\u01da"+ - "\u01df\5> \2\u01db\u01dc\7\f\2\2\u01dc\u01de\5> \2\u01dd\u01db\3\2\2\2"+ - "\u01de\u01e1\3\2\2\2\u01df\u01dd\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0\u01e3"+ - "\3\2\2\2\u01e1\u01df\3\2\2\2\u01e2\u01da\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3"+ - "\u01e4\3\2\2\2\u01e4\u01e6\7\n\2\2\u01e5\u01d8\3\2\2\2\u01e5\u01d9\3\2"+ - "\2\2\u01e6\u01e7\3\2\2\2\u01e7\u01ea\7\67\2\2\u01e8\u01eb\5\f\7\2\u01e9"+ - "\u01eb\5\36\20\2\u01ea\u01e8\3\2\2\2\u01ea\u01e9\3\2\2\2\u01eb=\3\2\2"+ - "\2\u01ec\u01ee\5\26\f\2\u01ed\u01ec\3\2\2\2\u01ed\u01ee\3\2\2\2\u01ee"+ - "\u01ef\3\2\2\2\u01ef\u01f0\7R\2\2\u01f0?\3\2\2\2\u01f1\u01f2\7Q\2\2\u01f2"+ - "\u01f3\7\66\2\2\u01f3\u01ff\7R\2\2\u01f4\u01f5\5\26\f\2\u01f5\u01f6\7"+ - "\66\2\2\u01f6\u01f7\7\27\2\2\u01f7\u01ff\3\2\2\2\u01f8\u01f9\7R\2\2\u01f9"+ - "\u01fa\7\66\2\2\u01fa\u01ff\7R\2\2\u01fb\u01fc\7\33\2\2\u01fc\u01fd\7"+ - "\66\2\2\u01fd\u01ff\7R\2\2\u01fe\u01f1\3\2\2\2\u01fe\u01f4\3\2\2\2\u01fe"+ - "\u01f8\3\2\2\2\u01fe\u01fb\3\2\2\2\u01ffA\3\2\2\2\64EK^amu\u0082\u0086"+ - "\u008a\u008f\u00b2\u00bb\u00bf\u00c5\u00ce\u00d8\u00e0\u00e6\u0121\u0123"+ - "\u0133\u0139\u0141\u0145\u0159\u015e\u0162\u0165\u016c\u017d\u0183\u0186"+ - "\u0192\u0195\u0198\u019e\u01a1\u01a9\u01b0\u01b8\u01c0\u01cc\u01cf\u01d6"+ - "\u01df\u01e2\u01e5\u01ea\u01ed\u01fe"; + "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\7\20\u0125\n\20\f\20\16"+ + 
"\20\u0128\13\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+ + "\3\21\3\21\5\21\u0137\n\21\3\22\3\22\7\22\u013b\n\22\f\22\16\22\u013e"+ + "\13\22\3\22\3\22\3\22\7\22\u0143\n\22\f\22\16\22\u0146\13\22\3\22\5\22"+ + "\u0149\n\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+ + "\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u015d\n\23\3\24\3\24\3\24\5\24\u0162"+ + "\n\24\3\25\3\25\5\25\u0166\n\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\30"+ + "\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\6\31\u0179\n\31\r\31\16"+ + "\31\u017a\3\31\3\31\7\31\u017f\n\31\f\31\16\31\u0182\13\31\5\31\u0184"+ + "\n\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\7\31\u018e\n\31\f\31\16"+ + "\31\u0191\13\31\5\31\u0193\n\31\3\31\5\31\u0196\n\31\3\31\3\31\7\31\u019a"+ + "\n\31\f\31\16\31\u019d\13\31\5\31\u019f\n\31\3\32\3\32\3\32\3\32\7\32"+ + "\u01a5\n\32\f\32\16\32\u01a8\13\32\3\32\3\32\3\32\3\32\5\32\u01ae\n\32"+ + "\3\33\3\33\3\33\3\33\7\33\u01b4\n\33\f\33\16\33\u01b7\13\33\3\33\3\33"+ + "\3\33\3\33\3\33\5\33\u01be\n\33\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35"+ + "\7\35\u01c8\n\35\f\35\16\35\u01cb\13\35\5\35\u01cd\n\35\3\35\3\35\3\36"+ + "\3\36\3\36\5\36\u01d4\n\36\3\37\3\37\3\37\3\37\3\37\7\37\u01db\n\37\f"+ + "\37\16\37\u01de\13\37\5\37\u01e0\n\37\3\37\5\37\u01e3\n\37\3\37\3\37\3"+ + "\37\5\37\u01e8\n\37\3 \5 \u01eb\n \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3"+ + "!\3!\3!\3!\5!\u01fc\n!\3!\2\3\36\"\2\4\6\b\n\f\16\20\22\24\26\30\32\34"+ + "\36 \"$&(*,.\60\62\64\668:<>@\2\17\3\3\16\16\3\2 \"\3\2#$\3\2:;\3\2%\'"+ + "\3\2(+\3\2,/\3\2>I\3\2<=\4\2\36\37#$\3\2JM\3\2\13\f\3\2UV\u0237\2E\3\2"+ + "\2\2\4P\3\2\2\2\6U\3\2\2\2\b\u00bb\3\2\2\2\n\u00bf\3\2\2\2\f\u00c1\3\2"+ + "\2\2\16\u00ca\3\2\2\2\20\u00ce\3\2\2\2\22\u00d0\3\2\2\2\24\u00d2\3\2\2"+ + "\2\26\u00db\3\2\2\2\30\u00e3\3\2\2\2\32\u00e8\3\2\2\2\34\u00ef\3\2\2\2"+ + "\36\u00f1\3\2\2\2 \u0136\3\2\2\2\"\u0148\3\2\2\2$\u015c\3\2\2\2&\u0161"+ + "\3\2\2\2(\u0165\3\2\2\2*\u0167\3\2\2\2,\u016b\3\2\2\2.\u016e\3\2\2\2\60"+ + 
"\u019e\3\2\2\2\62\u01ad\3\2\2\2\64\u01bd\3\2\2\2\66\u01bf\3\2\2\28\u01c3"+ + "\3\2\2\2:\u01d3\3\2\2\2<\u01e2\3\2\2\2>\u01ea\3\2\2\2@\u01fb\3\2\2\2B"+ + "D\5\4\3\2CB\3\2\2\2DG\3\2\2\2EC\3\2\2\2EF\3\2\2\2FK\3\2\2\2GE\3\2\2\2"+ + "HJ\5\b\5\2IH\3\2\2\2JM\3\2\2\2KI\3\2\2\2KL\3\2\2\2LN\3\2\2\2MK\3\2\2\2"+ + "NO\7\2\2\3O\3\3\2\2\2PQ\5\26\f\2QR\7T\2\2RS\5\6\4\2ST\5\f\7\2T\5\3\2\2"+ + "\2Ua\7\t\2\2VW\5\26\f\2W^\7T\2\2XY\7\r\2\2YZ\5\26\f\2Z[\7T\2\2[]\3\2\2"+ + "\2\\X\3\2\2\2]`\3\2\2\2^\\\3\2\2\2^_\3\2\2\2_b\3\2\2\2`^\3\2\2\2aV\3\2"+ + "\2\2ab\3\2\2\2bc\3\2\2\2cd\7\n\2\2d\7\3\2\2\2ef\7\17\2\2fg\7\t\2\2gh\5"+ + "\36\20\2hi\7\n\2\2im\5\n\6\2jk\7\21\2\2kn\5\n\6\2ln\6\5\2\2mj\3\2\2\2"+ + "ml\3\2\2\2n\u00bc\3\2\2\2op\7\22\2\2pq\7\t\2\2qr\5\36\20\2ru\7\n\2\2s"+ + "v\5\n\6\2tv\5\16\b\2us\3\2\2\2ut\3\2\2\2v\u00bc\3\2\2\2wx\7\23\2\2xy\5"+ + "\f\7\2yz\7\22\2\2z{\7\t\2\2{|\5\36\20\2|}\7\n\2\2}~\5\34\17\2~\u00bc\3"+ + "\2\2\2\177\u0080\7\24\2\2\u0080\u0082\7\t\2\2\u0081\u0083\5\20\t\2\u0082"+ + "\u0081\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0086\7\16"+ + "\2\2\u0085\u0087\5\36\20\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087"+ + "\u0088\3\2\2\2\u0088\u008a\7\16\2\2\u0089\u008b\5\22\n\2\u008a\u0089\3"+ + "\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c\u008f\7\n\2\2\u008d"+ + "\u0090\5\n\6\2\u008e\u0090\5\16\b\2\u008f\u008d\3\2\2\2\u008f\u008e\3"+ + "\2\2\2\u0090\u00bc\3\2\2\2\u0091\u0092\7\24\2\2\u0092\u0093\7\t\2\2\u0093"+ + "\u0094\5\26\f\2\u0094\u0095\7T\2\2\u0095\u0096\7\66\2\2\u0096\u0097\5"+ + "\36\20\2\u0097\u0098\7\n\2\2\u0098\u0099\5\n\6\2\u0099\u00bc\3\2\2\2\u009a"+ + "\u009b\7\24\2\2\u009b\u009c\7\t\2\2\u009c\u009d\7T\2\2\u009d\u009e\7\20"+ + "\2\2\u009e\u009f\5\36\20\2\u009f\u00a0\7\n\2\2\u00a0\u00a1\5\n\6\2\u00a1"+ + "\u00bc\3\2\2\2\u00a2\u00a3\5\24\13\2\u00a3\u00a4\5\34\17\2\u00a4\u00bc"+ + "\3\2\2\2\u00a5\u00a6\7\25\2\2\u00a6\u00bc\5\34\17\2\u00a7\u00a8\7\26\2"+ + "\2\u00a8\u00bc\5\34\17\2\u00a9\u00aa\7\27\2\2\u00aa\u00ab\5\36\20\2\u00ab"+ + 
"\u00ac\5\34\17\2\u00ac\u00bc\3\2\2\2\u00ad\u00ae\7\31\2\2\u00ae\u00b0"+ + "\5\f\7\2\u00af\u00b1\5\32\16\2\u00b0\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2"+ + "\u00b2\u00b0\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00bc\3\2\2\2\u00b4\u00b5"+ + "\7\33\2\2\u00b5\u00b6\5\36\20\2\u00b6\u00b7\5\34\17\2\u00b7\u00bc\3\2"+ + "\2\2\u00b8\u00b9\5\36\20\2\u00b9\u00ba\5\34\17\2\u00ba\u00bc\3\2\2\2\u00bb"+ + "e\3\2\2\2\u00bbo\3\2\2\2\u00bbw\3\2\2\2\u00bb\177\3\2\2\2\u00bb\u0091"+ + "\3\2\2\2\u00bb\u009a\3\2\2\2\u00bb\u00a2\3\2\2\2\u00bb\u00a5\3\2\2\2\u00bb"+ + "\u00a7\3\2\2\2\u00bb\u00a9\3\2\2\2\u00bb\u00ad\3\2\2\2\u00bb\u00b4\3\2"+ + "\2\2\u00bb\u00b8\3\2\2\2\u00bc\t\3\2\2\2\u00bd\u00c0\5\f\7\2\u00be\u00c0"+ + "\5\b\5\2\u00bf\u00bd\3\2\2\2\u00bf\u00be\3\2\2\2\u00c0\13\3\2\2\2\u00c1"+ + "\u00c5\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2\2\u00c4\u00c7\3\2"+ + "\2\2\u00c5\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6\u00c8\3\2\2\2\u00c7"+ + "\u00c5\3\2\2\2\u00c8\u00c9\7\6\2\2\u00c9\r\3\2\2\2\u00ca\u00cb\7\16\2"+ + "\2\u00cb\17\3\2\2\2\u00cc\u00cf\5\24\13\2\u00cd\u00cf\5\36\20\2\u00ce"+ + "\u00cc\3\2\2\2\u00ce\u00cd\3\2\2\2\u00cf\21\3\2\2\2\u00d0\u00d1\5\36\20"+ + "\2\u00d1\23\3\2\2\2\u00d2\u00d3\5\26\f\2\u00d3\u00d8\5\30\r\2\u00d4\u00d5"+ + "\7\r\2\2\u00d5\u00d7\5\30\r\2\u00d6\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2"+ + "\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\25\3\2\2\2\u00da\u00d8"+ + "\3\2\2\2\u00db\u00e0\7S\2\2\u00dc\u00dd\7\7\2\2\u00dd\u00df\7\b\2\2\u00de"+ + "\u00dc\3\2\2\2\u00df\u00e2\3\2\2\2\u00e0\u00de\3\2\2\2\u00e0\u00e1\3\2"+ + "\2\2\u00e1\27\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e6\7T\2\2\u00e4\u00e5"+ + "\7>\2\2\u00e5\u00e7\5\36\20\2\u00e6\u00e4\3\2\2\2\u00e6\u00e7\3\2\2\2"+ + "\u00e7\31\3\2\2\2\u00e8\u00e9\7\32\2\2\u00e9\u00ea\7\t\2\2\u00ea\u00eb"+ + "\7S\2\2\u00eb\u00ec\7T\2\2\u00ec\u00ed\7\n\2\2\u00ed\u00ee\5\f\7\2\u00ee"+ + "\33\3\2\2\2\u00ef\u00f0\t\2\2\2\u00f0\35\3\2\2\2\u00f1\u00f2\b\20\1\2"+ + "\u00f2\u00f3\5 \21\2\u00f3\u0126\3\2\2\2\u00f4\u00f5\f\21\2\2\u00f5\u00f6"+ + 
"\t\3\2\2\u00f6\u0125\5\36\20\22\u00f7\u00f8\f\20\2\2\u00f8\u00f9\t\4\2"+ + "\2\u00f9\u0125\5\36\20\21\u00fa\u00fb\f\17\2\2\u00fb\u00fc\t\5\2\2\u00fc"+ + "\u0125\5\36\20\20\u00fd\u00fe\f\16\2\2\u00fe\u00ff\t\6\2\2\u00ff\u0125"+ + "\5\36\20\17\u0100\u0101\f\r\2\2\u0101\u0102\t\7\2\2\u0102\u0125\5\36\20"+ + "\16\u0103\u0104\f\13\2\2\u0104\u0105\t\b\2\2\u0105\u0125\5\36\20\f\u0106"+ + "\u0107\f\n\2\2\u0107\u0108\7\60\2\2\u0108\u0125\5\36\20\13\u0109\u010a"+ + "\f\t\2\2\u010a\u010b\7\61\2\2\u010b\u0125\5\36\20\n\u010c\u010d\f\b\2"+ + "\2\u010d\u010e\7\62\2\2\u010e\u0125\5\36\20\t\u010f\u0110\f\7\2\2\u0110"+ + "\u0111\7\63\2\2\u0111\u0125\5\36\20\b\u0112\u0113\f\6\2\2\u0113\u0114"+ + "\7\64\2\2\u0114\u0125\5\36\20\7\u0115\u0116\f\5\2\2\u0116\u0117\7\65\2"+ + "\2\u0117\u0118\5\36\20\2\u0118\u0119\7\66\2\2\u0119\u011a\5\36\20\5\u011a"+ + "\u0125\3\2\2\2\u011b\u011c\f\4\2\2\u011c\u011d\7\67\2\2\u011d\u0125\5"+ + "\36\20\4\u011e\u011f\f\3\2\2\u011f\u0120\t\t\2\2\u0120\u0125\5\36\20\3"+ + "\u0121\u0122\f\f\2\2\u0122\u0123\7\35\2\2\u0123\u0125\5\26\f\2\u0124\u00f4"+ + "\3\2\2\2\u0124\u00f7\3\2\2\2\u0124\u00fa\3\2\2\2\u0124\u00fd\3\2\2\2\u0124"+ + "\u0100\3\2\2\2\u0124\u0103\3\2\2\2\u0124\u0106\3\2\2\2\u0124\u0109\3\2"+ + "\2\2\u0124\u010c\3\2\2\2\u0124\u010f\3\2\2\2\u0124\u0112\3\2\2\2\u0124"+ + "\u0115\3\2\2\2\u0124\u011b\3\2\2\2\u0124\u011e\3\2\2\2\u0124\u0121\3\2"+ + "\2\2\u0125\u0128\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127"+ + "\37\3\2\2\2\u0128\u0126\3\2\2\2\u0129\u012a\t\n\2\2\u012a\u0137\5\"\22"+ + "\2\u012b\u012c\5\"\22\2\u012c\u012d\t\n\2\2\u012d\u0137\3\2\2\2\u012e"+ + "\u0137\5\"\22\2\u012f\u0130\t\13\2\2\u0130\u0137\5 \21\2\u0131\u0132\7"+ + "\t\2\2\u0132\u0133\5\26\f\2\u0133\u0134\7\n\2\2\u0134\u0135\5 \21\2\u0135"+ + "\u0137\3\2\2\2\u0136\u0129\3\2\2\2\u0136\u012b\3\2\2\2\u0136\u012e\3\2"+ + "\2\2\u0136\u012f\3\2\2\2\u0136\u0131\3\2\2\2\u0137!\3\2\2\2\u0138\u013c"+ + "\5$\23\2\u0139\u013b\5&\24\2\u013a\u0139\3\2\2\2\u013b\u013e\3\2\2\2\u013c"+ + 
"\u013a\3\2\2\2\u013c\u013d\3\2\2\2\u013d\u0149\3\2\2\2\u013e\u013c\3\2"+ + "\2\2\u013f\u0140\5\26\f\2\u0140\u0144\5(\25\2\u0141\u0143\5&\24\2\u0142"+ + "\u0141\3\2\2\2\u0143\u0146\3\2\2\2\u0144\u0142\3\2\2\2\u0144\u0145\3\2"+ + "\2\2\u0145\u0149\3\2\2\2\u0146\u0144\3\2\2\2\u0147\u0149\5\60\31\2\u0148"+ + "\u0138\3\2\2\2\u0148\u013f\3\2\2\2\u0148\u0147\3\2\2\2\u0149#\3\2\2\2"+ + "\u014a\u014b\7\t\2\2\u014b\u014c\5\36\20\2\u014c\u014d\7\n\2\2\u014d\u015d"+ + "\3\2\2\2\u014e\u015d\t\f\2\2\u014f\u015d\7P\2\2\u0150\u015d\7Q\2\2\u0151"+ + "\u015d\7R\2\2\u0152\u015d\7N\2\2\u0153\u015d\7O\2\2\u0154\u015d\5\62\32"+ + "\2\u0155\u015d\5\64\33\2\u0156\u015d\7T\2\2\u0157\u0158\7T\2\2\u0158\u015d"+ + "\58\35\2\u0159\u015a\7\30\2\2\u015a\u015b\7S\2\2\u015b\u015d\58\35\2\u015c"+ + "\u014a\3\2\2\2\u015c\u014e\3\2\2\2\u015c\u014f\3\2\2\2\u015c\u0150\3\2"+ + "\2\2\u015c\u0151\3\2\2\2\u015c\u0152\3\2\2\2\u015c\u0153\3\2\2\2\u015c"+ + "\u0154\3\2\2\2\u015c\u0155\3\2\2\2\u015c\u0156\3\2\2\2\u015c\u0157\3\2"+ + "\2\2\u015c\u0159\3\2\2\2\u015d%\3\2\2\2\u015e\u0162\5*\26\2\u015f\u0162"+ + "\5,\27\2\u0160\u0162\5.\30\2\u0161\u015e\3\2\2\2\u0161\u015f\3\2\2\2\u0161"+ + "\u0160\3\2\2\2\u0162\'\3\2\2\2\u0163\u0166\5*\26\2\u0164\u0166\5,\27\2"+ + "\u0165\u0163\3\2\2\2\u0165\u0164\3\2\2\2\u0166)\3\2\2\2\u0167\u0168\t"+ + "\r\2\2\u0168\u0169\7V\2\2\u0169\u016a\58\35\2\u016a+\3\2\2\2\u016b\u016c"+ + "\t\r\2\2\u016c\u016d\t\16\2\2\u016d-\3\2\2\2\u016e\u016f\7\7\2\2\u016f"+ + "\u0170\5\36\20\2\u0170\u0171\7\b\2\2\u0171/\3\2\2\2\u0172\u0173\7\30\2"+ + "\2\u0173\u0178\7S\2\2\u0174\u0175\7\7\2\2\u0175\u0176\5\36\20\2\u0176"+ + "\u0177\7\b\2\2\u0177\u0179\3\2\2\2\u0178\u0174\3\2\2\2\u0179\u017a\3\2"+ + "\2\2\u017a\u0178\3\2\2\2\u017a\u017b\3\2\2\2\u017b\u0183\3\2\2\2\u017c"+ + "\u0180\5(\25\2\u017d\u017f\5&\24\2\u017e\u017d\3\2\2\2\u017f\u0182\3\2"+ + "\2\2\u0180\u017e\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0184\3\2\2\2\u0182"+ + "\u0180\3\2\2\2\u0183\u017c\3\2\2\2\u0183\u0184\3\2\2\2\u0184\u019f\3\2"+ + 
"\2\2\u0185\u0186\7\30\2\2\u0186\u0187\7S\2\2\u0187\u0188\7\7\2\2\u0188"+ + "\u0189\7\b\2\2\u0189\u0192\7\5\2\2\u018a\u018f\5\36\20\2\u018b\u018c\7"+ + "\r\2\2\u018c\u018e\5\36\20\2\u018d\u018b\3\2\2\2\u018e\u0191\3\2\2\2\u018f"+ + "\u018d\3\2\2\2\u018f\u0190\3\2\2\2\u0190\u0193\3\2\2\2\u0191\u018f\3\2"+ + "\2\2\u0192\u018a\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\3\2\2\2\u0194"+ + "\u0196\7\16\2\2\u0195\u0194\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0197\3"+ + "\2\2\2\u0197\u019b\7\6\2\2\u0198\u019a\5&\24\2\u0199\u0198\3\2\2\2\u019a"+ + "\u019d\3\2\2\2\u019b\u0199\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019f\3\2"+ + "\2\2\u019d\u019b\3\2\2\2\u019e\u0172\3\2\2\2\u019e\u0185\3\2\2\2\u019f"+ + "\61\3\2\2\2\u01a0\u01a1\7\7\2\2\u01a1\u01a6\5\36\20\2\u01a2\u01a3\7\r"+ + "\2\2\u01a3\u01a5\5\36\20\2\u01a4\u01a2\3\2\2\2\u01a5\u01a8\3\2\2\2\u01a6"+ + "\u01a4\3\2\2\2\u01a6\u01a7\3\2\2\2\u01a7\u01a9\3\2\2\2\u01a8\u01a6\3\2"+ + "\2\2\u01a9\u01aa\7\b\2\2\u01aa\u01ae\3\2\2\2\u01ab\u01ac\7\7\2\2\u01ac"+ + "\u01ae\7\b\2\2\u01ad\u01a0\3\2\2\2\u01ad\u01ab\3\2\2\2\u01ae\63\3\2\2"+ + "\2\u01af\u01b0\7\7\2\2\u01b0\u01b5\5\66\34\2\u01b1\u01b2\7\r\2\2\u01b2"+ + "\u01b4\5\66\34\2\u01b3\u01b1\3\2\2\2\u01b4\u01b7\3\2\2\2\u01b5\u01b3\3"+ + "\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8"+ + "\u01b9\7\b\2\2\u01b9\u01be\3\2\2\2\u01ba\u01bb\7\7\2\2\u01bb\u01bc\7\66"+ + "\2\2\u01bc\u01be\7\b\2\2\u01bd\u01af\3\2\2\2\u01bd\u01ba\3\2\2\2\u01be"+ + "\65\3\2\2\2\u01bf\u01c0\5\36\20\2\u01c0\u01c1\7\66\2\2\u01c1\u01c2\5\36"+ + "\20\2\u01c2\67\3\2\2\2\u01c3\u01cc\7\t\2\2\u01c4\u01c9\5:\36\2\u01c5\u01c6"+ + "\7\r\2\2\u01c6\u01c8\5:\36\2\u01c7\u01c5\3\2\2\2\u01c8\u01cb\3\2\2\2\u01c9"+ + "\u01c7\3\2\2\2\u01c9\u01ca\3\2\2\2\u01ca\u01cd\3\2\2\2\u01cb\u01c9\3\2"+ + "\2\2\u01cc\u01c4\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01ce\3\2\2\2\u01ce"+ + "\u01cf\7\n\2\2\u01cf9\3\2\2\2\u01d0\u01d4\5\36\20\2\u01d1\u01d4\5<\37"+ + "\2\u01d2\u01d4\5@!\2\u01d3\u01d0\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d3\u01d2"+ + 
"\3\2\2\2\u01d4;\3\2\2\2\u01d5\u01e3\5> \2\u01d6\u01df\7\t\2\2\u01d7\u01dc"+ + "\5> \2\u01d8\u01d9\7\r\2\2\u01d9\u01db\5> \2\u01da\u01d8\3\2\2\2\u01db"+ + "\u01de\3\2\2\2\u01dc\u01da\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01e0\3\2"+ + "\2\2\u01de\u01dc\3\2\2\2\u01df\u01d7\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0"+ + "\u01e1\3\2\2\2\u01e1\u01e3\7\n\2\2\u01e2\u01d5\3\2\2\2\u01e2\u01d6\3\2"+ + "\2\2\u01e3\u01e4\3\2\2\2\u01e4\u01e7\79\2\2\u01e5\u01e8\5\f\7\2\u01e6"+ + "\u01e8\5\36\20\2\u01e7\u01e5\3\2\2\2\u01e7\u01e6\3\2\2\2\u01e8=\3\2\2"+ + "\2\u01e9\u01eb\5\26\f\2\u01ea\u01e9\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb"+ + "\u01ec\3\2\2\2\u01ec\u01ed\7T\2\2\u01ed?\3\2\2\2\u01ee\u01ef\7S\2\2\u01ef"+ + "\u01f0\78\2\2\u01f0\u01fc\7T\2\2\u01f1\u01f2\5\26\f\2\u01f2\u01f3\78\2"+ + "\2\u01f3\u01f4\7\30\2\2\u01f4\u01fc\3\2\2\2\u01f5\u01f6\7T\2\2\u01f6\u01f7"+ + "\78\2\2\u01f7\u01fc\7T\2\2\u01f8\u01f9\7\34\2\2\u01f9\u01fa\78\2\2\u01fa"+ + "\u01fc\7T\2\2\u01fb\u01ee\3\2\2\2\u01fb\u01f1\3\2\2\2\u01fb\u01f5\3\2"+ + "\2\2\u01fb\u01f8\3\2\2\2\u01fcA\3\2\2\2\62EK^amu\u0082\u0086\u008a\u008f"+ + "\u00b2\u00bb\u00bf\u00c5\u00ce\u00d8\u00e0\u00e6\u0124\u0126\u0136\u013c"+ + "\u0144\u0148\u015c\u0161\u0165\u017a\u0180\u0183\u018f\u0192\u0195\u019b"+ + "\u019e\u01a6\u01ad\u01b5\u01bd\u01c9\u01cc\u01d3\u01dc\u01df\u01e2\u01e7"+ + "\u01ea\u01fb"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java index d3a87f1a099..8c4741e6725 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java @@ -235,6 +235,13 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link 
#visitChildren} on {@code ctx}.

*/ @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitElvis(PainlessParser.ElvisContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java index 0f7fef6185a..47bfd4a1d05 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java @@ -221,6 +221,13 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitBinary(PainlessParser.BinaryContext ctx); + /** + * Visit a parse tree produced by the {@code elvis} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitElvis(PainlessParser.ElvisContext ctx); /** * Visit a parse tree produced by the {@code instanceof} * labeled alternative in {@link PainlessParser#expression}. 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 5659afc75ea..84d58afa62d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -58,6 +58,7 @@ import org.elasticsearch.painless.antlr.PainlessParser.DelimiterContext; import org.elasticsearch.painless.antlr.PainlessParser.DoContext; import org.elasticsearch.painless.antlr.PainlessParser.DynamicContext; import org.elasticsearch.painless.antlr.PainlessParser.EachContext; +import org.elasticsearch.painless.antlr.PainlessParser.ElvisContext; import org.elasticsearch.painless.antlr.PainlessParser.EmptyContext; import org.elasticsearch.painless.antlr.PainlessParser.ExprContext; import org.elasticsearch.painless.antlr.PainlessParser.ExpressionContext; @@ -117,6 +118,7 @@ import org.elasticsearch.painless.node.ECapturingFunctionRef; import org.elasticsearch.painless.node.EComp; import org.elasticsearch.painless.node.EConditional; import org.elasticsearch.painless.node.EDecimal; +import org.elasticsearch.painless.node.EElvis; import org.elasticsearch.painless.node.EExplicit; import org.elasticsearch.painless.node.EFunctionRef; import org.elasticsearch.painless.node.EInstanceof; @@ -616,6 +618,14 @@ public final class Walker extends PainlessParserBaseVisitor { return new EConditional(location(ctx), condition, left, right); } + @Override + public ANode visitElvis(ElvisContext ctx) { + AExpression left = (AExpression)visit(ctx.expression(0)); + AExpression right = (AExpression)visit(ctx.expression(1)); + + return new EElvis(location(ctx), left, right); + } + @Override public ANode visitAssignment(AssignmentContext ctx) { AExpression lhs = (AExpression)visit(ctx.expression(0)); @@ -898,7 +908,7 @@ public final class Walker extends PainlessParserBaseVisitor { String name 
= ctx.DOTID().getText(); List arguments = collectArguments(ctx.arguments()); - return new PCallInvoke(location(ctx), prefix, name, ctx.COND() != null, arguments); + return new PCallInvoke(location(ctx), prefix, name, ctx.NSDOT() != null, arguments); } @Override @@ -917,7 +927,7 @@ public final class Walker extends PainlessParserBaseVisitor { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } - return new PField(location(ctx), prefix, ctx.COND() != null, value); + return new PField(location(ctx), prefix, ctx.NSDOT() != null, value); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java new file mode 100644 index 00000000000..216fd3b8a43 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.AnalyzerCaster; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Label; + +import java.util.Set; + +import static java.util.Objects.requireNonNull; + +/** + * The Elvis operator ({@code ?:}), a null coalescing operator. Binary operator that evaluates the first expression and return it if it is + * non null. If the first expression is null then it evaluates the second expression and returns it. + */ +public class EElvis extends AExpression { + private AExpression lhs; + private AExpression rhs; + + public EElvis(Location location, AExpression lhs, AExpression rhs) { + super(location); + + this.lhs = requireNonNull(lhs); + this.rhs = requireNonNull(rhs); + } + + @Override + void extractVariables(Set variables) { + lhs.extractVariables(variables); + rhs.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + if (expected != null && expected.sort.primitive) { + throw createError(new IllegalArgumentException("Evlis operator cannot return primitives")); + } + lhs.expected = expected; + lhs.explicit = explicit; + lhs.internal = internal; + rhs.expected = expected; + rhs.explicit = explicit; + rhs.internal = internal; + actual = expected; + lhs.analyze(locals); + rhs.analyze(locals); + + if (lhs.isNull) { + throw createError(new IllegalArgumentException("Extraneous elvis operator. LHS is null.")); + } + if (lhs.constant != null) { + throw createError(new IllegalArgumentException("Extraneous elvis operator. LHS is a constant.")); + } + if (lhs.actual.sort.primitive) { + throw createError(new IllegalArgumentException("Extraneous elvis operator. 
LHS is a primitive.")); + } + if (rhs.isNull) { + throw createError(new IllegalArgumentException("Extraneous elvis operator. RHS is null.")); + } + + if (expected == null) { + final Type promote = AnalyzerCaster.promoteConditional(lhs.actual, rhs.actual, lhs.constant, rhs.constant); + + lhs.expected = promote; + rhs.expected = promote; + actual = promote; + } + + lhs = lhs.cast(locals); + rhs = rhs.cast(locals); + } + + @Override + void write(MethodWriter writer, Globals globals) { + writer.writeDebugInfo(location); + + Label end = new Label(); + + lhs.write(writer, globals); + writer.dup(); + writer.ifNonNull(end); + writer.pop(); + rhs.write(writer, globals); + writer.mark(end); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java index 51349949dbd..c621f497ed9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -54,8 +53,7 @@ public class PSubNullSafeCallInvoke extends AExpression { guarded.analyze(locals); actual = guarded.actual; if (actual.sort.primitive) { - // Result must be nullable. We emit boxing instructions if needed. 
- actual = Definition.getType(actual.sort.boxed.getSimpleName()); + throw new IllegalArgumentException("Result of null safe operator must be nullable"); } } @@ -67,10 +65,6 @@ public class PSubNullSafeCallInvoke extends AExpression { writer.dup(); writer.ifNull(end); guarded.write(writer, globals); - if (guarded.actual.sort.primitive) { - // Box primitives so they are nullable - writer.box(guarded.actual.type); - } writer.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java index 32ad6c0cb62..61c30a6aa20 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -54,8 +53,7 @@ public class PSubNullSafeField extends AStoreable { guarded.analyze(locals); actual = guarded.actual; if (actual.sort.primitive) { - // Result must be nullable. We emit boxing instructions if needed. 
- actual = Definition.getType(actual.sort.boxed.getSimpleName()); + throw new IllegalArgumentException("Result of null safe operator must be nullable"); } } @@ -81,10 +79,6 @@ public class PSubNullSafeField extends AStoreable { writer.dup(); writer.ifNull(end); guarded.write(writer, globals); - if (guarded.actual.sort.primitive) { - // Box primitives so they are nullable - writer.box(guarded.actual.type); - } writer.mark(end); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index f35ffc77f7b..7c0694d67ba 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -148,29 +148,46 @@ public class BasicExpressionTests extends ScriptTestCase { public void testNullSafeDeref() { // Objects in general - assertNull( exec("String a = null; return a?.toString()")); // Call - assertNull( exec("String a = null; return a?.length()")); // Call and box - assertEquals("foo", exec("String a = 'foo'; return a?.toString()")); // Call - assertEquals(Integer.valueOf(3), exec("String a = 'foo'; return a?.length()")); // Call and box + // Call + assertNull( exec("String a = null; return a?.toString()")); + assertEquals("foo", exec("String a = 'foo'; return a?.toString()")); + assertNull( exec("def a = null; return a?.toString()")); + assertEquals("foo", exec("def a = 'foo'; return a?.toString()")); + // Call with primitive result + assertMustBeNullable( "String a = null; return a?.length()"); + assertMustBeNullable( "String a = 'foo'; return a?.length()"); + assertNull( exec("def a = null; return a?.length()")); + assertEquals(3, exec("def a = 'foo'; return a?.length()")); + // Read shortcut + assertMustBeNullable( "org.elasticsearch.painless.FeatureTest a = null; return a?.x"); + assertMustBeNullable( + 
"org.elasticsearch.painless.FeatureTest a = new org.elasticsearch.painless.FeatureTest(); return a?.x"); + assertNull( exec("def a = null; return a?.x")); + assertEquals(0, exec("def a = new org.elasticsearch.painless.FeatureTest(); return a?.x")); // Maps - assertNull( exec("Map a = null; return a?.toString()")); // Call - assertNull( exec("Map a = null; return a?.size()")); // Call and box + // Call + assertNull( exec("Map a = null; return a?.toString()")); + assertEquals("{}", exec("Map a = [:]; return a?.toString()")); + assertNull( exec("def a = null; return a?.toString()")); + assertEquals("{}", exec("def a = [:]; return a?.toString()")); + // Call with primitive result + assertMustBeNullable( "Map a = [:]; return a?.size()"); + assertMustBeNullable( "Map a = null; return a?.size()"); + assertNull( exec("def a = null; return a?.size()")); + assertEquals(0, exec("def a = [:]; return a?.size()")); + // Read shortcut assertNull( exec("Map a = null; return a?.other")); // Read shortcut - assertEquals("{}", exec("Map a = [:]; return a?.toString()")); // Call - assertEquals(0, exec("Map a = [:]; return a?.size()")); // Call and box assertEquals(1, exec("Map a = ['other':1]; return a?.other")); // Read shortcut + assertNull( exec("def a = null; return a?.other")); // Read shortcut + assertEquals(1, exec("def a = ['other':1]; return a?.other")); // Read shortcut // Array // Since you can't invoke methods on arrays we skip the toString and hashCode tests - assertNull( exec("int[] a = null; return a?.length")); // Length (boxed) - assertEquals(2, exec("int[] a = new int[] {2, 3}; return a?.length")); // Length (boxed) - - // Def - assertNull( exec("def a = null; return a?.getX()")); // Invoke - assertNull( exec("def a = null; return a?.x")); // Read shortcut - assertEquals(0, exec("def a = new org.elasticsearch.painless.FeatureTest(); return a?.getX()")); // Invoke - assertEquals(0, exec("def a = new org.elasticsearch.painless.FeatureTest(); return a?.x")); // Read 
shortcut + assertMustBeNullable("int[] a = null; return a?.length"); + assertMustBeNullable("int[] a = new int[] {2, 3}; return a?.length"); + assertNull( exec("def a = null; return a?.length")); + assertEquals(2, exec("def a = new int[] {2, 3}; return a?.length")); // Results from maps (should just work but let's test anyway) FeatureTest t = new FeatureTest(); @@ -193,7 +210,7 @@ public class BasicExpressionTests extends ScriptTestCase { assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.getX()", singletonMap("t", t), true)); assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true)); - // Check that we don't try to cast when the LHS doesn't provide an "expected" value + // Assignments assertNull(exec( "def a = [:];\n" + "a.missing_length = a.missing?.length();\n" @@ -214,4 +231,9 @@ public class BasicExpressionTests extends ScriptTestCase { // assertEquals(null, exec("Map a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); // assertEquals(null, exec("def a = ['thing': 'bar']; a.other?.cat?.dog = 'wombat'; return a.other?.cat?.dog")); } + + private void assertMustBeNullable(String script) { + Exception e = expectScriptThrows(IllegalArgumentException.class , () -> exec(script)); + assertEquals("Result of null safe operator must be nullable", e.getMessage()); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ElvisTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ElvisTests.java new file mode 100644 index 00000000000..44859852240 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ElvisTests.java @@ -0,0 +1,143 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; + +/** + * Tests for the Elvis operator ({@code ?:}). + */ +public class ElvisTests extends ScriptTestCase { + public void testBasics() { + // Basics + assertEquals("str", exec("return params.a ?: 'str'")); + assertEquals("str", exec("return params.a ?: 'str2'", singletonMap("a", "str"), true)); + assertEquals("str", exec("return params.a ?: 'asdf'", singletonMap("a", "str"), true)); + + // Assigning to a primitive + assertCannotReturnPrimitive("int i = params.a ?: 1; return i"); + assertCannotReturnPrimitive("Integer a = Integer.valueOf(1); int b = a ?: 2; return b"); + assertCannotReturnPrimitive("Integer a = Integer.valueOf(1); int b = a ?: Integer.valueOf(2); return b"); + + // Assigning to an object + assertEquals(1, exec("Integer i = params.a ?: Integer.valueOf(1); return i")); + assertEquals(1, exec("Integer i = params.a ?: Integer.valueOf(2); return i", singletonMap("a", 1), true)); + assertEquals(1, exec("Integer a = Integer.valueOf(1); Integer b = a ?: Integer.valueOf(2); return b")); + assertEquals(2, exec("Integer a = null; Integer b = a ?: Integer.valueOf(2); return b")); + + // Explicit casting + assertEquals(1, exec("return (Integer)(params.a ?: Integer.valueOf(1))")); + 
assertEquals(1, exec("return (Integer)(params.a ?: Integer.valueOf(2))", singletonMap("a", 1), true)); + assertCannotReturnPrimitive("return (int)(params.a ?: 1)"); + + // Now some chains + assertEquals(1, exec("return params.a ?: params.a ?: 1")); + assertEquals(1, exec("return params.a ?: params.b ?: 'j'", singletonMap("b", 1), true)); + assertEquals(1, exec("return params.a ?: params.b ?: 'j'", singletonMap("a", 1), true)); + + // Precedence + assertEquals(1, exec("return params.a ?: 2 + 2", singletonMap("a", 1), true)); + assertEquals(4, exec("return params.a ?: 2 + 2")); + assertEquals(2, exec("return params.a + 1 ?: 2 + 2", singletonMap("a", 1), true)); // Yes, this is silly, but it should be valid + + // Weird casts + assertEquals(1, exec("int i = params.i; String s = params.s; return s ?: i", singletonMap("i", 1), true)); + assertEquals("str", exec("Integer i = params.i; String s = params.s; return s ?: i", singletonMap("s", "str"), true)); + + // Combining + assertEquals(2, exec("return (params.a ?: 0) + 1", singletonMap("a", 1), true)); + assertEquals(1, exec("return (params.a ?: 0) + 1")); + assertEquals(2, exec("return (params.a ?: ['b': 10]).b + 1", singletonMap("a", singletonMap("b", 1)), true)); + assertEquals(11, exec("return (params.a ?: ['b': 10]).b + 1")); + } + + public void testWithNullSafeDereferences() { + assertEquals(1, exec("return params.a?.b ?: 1")); + assertEquals(1, exec("return params.a?.b ?: 2", singletonMap("a", singletonMap("b", 1)), true)); + + // TODO This could be expanded to allow primitives where neither of the two operations allow them alone + } + + public void testLazy() { + assertEquals(1, exec("def fail() {throw new RuntimeException('test')} return params.a ?: fail()", singletonMap("a", 1), true)); + Exception e = expectScriptThrows(RuntimeException.class, () -> + exec("def fail() {throw new RuntimeException('test')} return params.a ?: fail()")); + assertEquals(e.getMessage(), "test"); + } + + /** + * Checks that {@code a 
?: b ?: c} is parsed as {@code a ?: (b ?: c)} instead of {@code (a ?: b) ?: c} which is nice because the + * first one only needs one comparison if the {@code a} is non-null while the second one needs two. + */ + public void testRightAssociative() { + checkOneBranch("params.a ?: (params.b ?: params.c)", true); + checkOneBranch("(params.a ?: params.b) ?: params.c", false); + checkOneBranch("params.a ?: params.b ?: params.c", true); + } + + private void checkOneBranch(String code, boolean expectOneBranch) { + /* Sadly this is super finicky about the output of the disassembly but I think it is worth having because it makes sure that + * the code generated for the elvis operator is as efficient as possible. */ + String disassembled = Debugger.toString(code); + int firstLookup = disassembled.indexOf("INVOKEINTERFACE java/util/Map.get (Ljava/lang/Object;)Ljava/lang/Object;"); + assertThat(disassembled, firstLookup, greaterThan(-1)); + int firstElvisDestinationLabelIndex = disassembled.indexOf("IFNONNULL L", firstLookup); + assertThat(disassembled, firstElvisDestinationLabelIndex, greaterThan(-1)); + String firstElvisDestinationLabel = disassembled.substring(firstElvisDestinationLabelIndex + "IFNONNULL ".length(), + disassembled.indexOf('\n', firstElvisDestinationLabelIndex)); + int firstElvisDestionation = disassembled.indexOf(" " + firstElvisDestinationLabel); + assertThat(disassembled, firstElvisDestionation, greaterThan(-1)); + int ifAfterFirstElvisDestination = disassembled.indexOf("IF", firstElvisDestionation); + if (expectOneBranch) { + assertThat(disassembled, ifAfterFirstElvisDestination, lessThan(0)); + } else { + assertThat(disassembled, ifAfterFirstElvisDestination, greaterThan(-1)); + } + int returnAfterFirstElvisDestination = disassembled.indexOf("RETURN", firstElvisDestionation); + assertThat(disassembled, returnAfterFirstElvisDestination, greaterThan(-1)); + } + + public void testExtraneous() { + Exception e = 
expectScriptThrows(IllegalArgumentException.class, () -> exec("int i = params.a; return i ?: 1")); + assertEquals("Extraneous elvis operator. LHS is a primitive.", e.getMessage()); + expectScriptThrows(IllegalArgumentException.class, () -> exec("int i = params.a; return i + 10 ?: 'ignored'")); + assertEquals("Extraneous elvis operator. LHS is a primitive.", e.getMessage()); + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return 'cat' ?: 1")); + assertEquals("Extraneous elvis operator. LHS is a constant.", e.getMessage()); + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return null ?: 'j'")); + assertEquals("Extraneous elvis operator. LHS is null.", e.getMessage()); + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return params.a ?: null ?: 'j'")); + assertEquals("Extraneous elvis operator. LHS is null.", e.getMessage()); + e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return params.a ?: null")); + assertEquals("Extraneous elvis operator. RHS is null.", e.getMessage()); + } + + public void testQuestionSpaceColonIsNotElvis() { + Exception e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return params.a ? 
: 1", false)); + assertEquals("invalid sequence of tokens near [':'].", e.getMessage()); + } + + private void assertCannotReturnPrimitive(String script) { + Exception e = expectScriptThrows(IllegalArgumentException.class, () -> exec(script)); + assertEquals("Evlis operator cannot return primitives", e.getMessage()); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index 664394a6d81..f23b13341cc 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -261,4 +261,10 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return -92233720368547758070")); assertEquals("Invalid int constant [-92233720368547758070].", e.getMessage()); } + + public void testQuestionSpaceDotIsNotNullSafeDereference() { + Exception e = expectScriptThrows(IllegalArgumentException.class, () -> exec("return params.a? .b", false)); + assertEquals("invalid sequence of tokens near ['.'].", e.getMessage()); + } + } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml index 01f3048300b..f626a77bf93 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -68,7 +68,7 @@ setup: script_fields: bar: script: - inline: "doc['foo'].value?.length() + params.x;" + inline: "(doc['foo'].value?.length() ?: 0) + params.x;" params: x: 5 @@ -83,7 +83,7 @@ setup: script_fields: bar: script: - inline: "(doc['missing'].value?.length() == null ? 
0 : doc['missing'].value?.length()) + params.x;" + inline: "(doc['missing'].value?.length() ?: 0) + params.x;" params: x: 5 From aed88fe7a2a5e9dd2cde6c20963b231a36875fa2 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 19 Nov 2016 06:27:25 -0500 Subject: [PATCH 19/50] Log node ID on startup If the node name is explicitly set it's not derived from the node ID meaning that it doesn't immediately appear in the logs. While it can be tracked down in other places, it would be easier for info purposes if it just showed up explicitly. This commit adds the node ID to the logs, whether or not the node name is set. Relates #21673 --- core/src/main/java/org/elasticsearch/node/Node.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 1268a115de1..3be9f757125 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -252,11 +252,15 @@ public class Node implements Closeable { } final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings); - tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeEnvironment.nodeId()); Logger logger = Loggers.getLogger(Node.class, tmpSettings); + final String nodeId = nodeEnvironment.nodeId(); + tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId); + // this must be captured after the node name is possibly added to the settings + final String nodeName = NODE_NAME_SETTING.get(tmpSettings); if (hadPredefinedNodeName == false) { - logger.info("node name [{}] derived from node ID; set [{}] to override", - NODE_NAME_SETTING.get(tmpSettings), NODE_NAME_SETTING.getKey()); + logger.info("node name [{}] derived from node ID [{}]; set [{}] to override", nodeName, nodeId, NODE_NAME_SETTING.getKey()); + } else { + logger.info("node name [{}], node ID [{}]", nodeName, nodeId); } final JvmInfo jvmInfo = JvmInfo.jvmInfo(); @@ -319,7 
+323,7 @@ public class Node implements Closeable { final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool); clusterService.add(scriptModule.getScriptService()); resourcesToClose.add(clusterService); - final TribeService tribeService = new TribeService(settings, clusterService, nodeEnvironment.nodeId(), + final TribeService tribeService = new TribeService(settings, clusterService, nodeId, s -> newTribeClientNode(s, classpathPlugins)); resourcesToClose.add(tribeService); final IngestService ingestService = new IngestService(settings, threadPool, this.environment, From dce51e2062ef100e1bdba2825b814044d3384a95 Mon Sep 17 00:00:00 2001 From: Shubham Aggarwal Date: Sat, 19 Nov 2016 17:15:00 +0530 Subject: [PATCH 20/50] Update getting-started.asciidoc (#21677) --- docs/reference/getting-started.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index e9e3af44d86..26d0f6a6f83 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -201,7 +201,7 @@ Let's start with a basic health check, which we can use to see how our cluster i To check the cluster health, we will be using the <>. You can run the command below in https://www.elastic.co/guide/en/kibana/{branch}/console-kibana.html[Kibana's Console] by clicking "VIEW IN CONSOLE" or with `curl` by clicking the "COPY AS CURL" -link below and pasting the into a terminal. +link below and pasting it into a terminal. [source,js] -------------------------------------------------- From 655c4fe17294b51d567add4bccfd6ad6cdfe9c5e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 19 Nov 2016 07:05:39 -0500 Subject: [PATCH 21/50] Wrap GroovyBugErrors in ScriptExceptions When Groovy detects a bug in its runtime because an internal assertion was violated, it throws an GroovyBugError. 
This descends from AssertionError and if it goes uncaught will land in the uncaught exception handler and will not deliver any useful information to the user. This commit wraps GroovyBugErrors in ScriptExceptions so that useful information is returned to the user. --- .../script/groovy/GroovyScriptEngineService.java | 16 +++++++++++----- .../script/groovy/GroovySecurityTests.java | 8 ++++++++ 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 0cd8976c76c..7ce05f82038 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -28,6 +28,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; +import org.codehaus.groovy.GroovyBugError; import org.codehaus.groovy.ast.ClassCodeExpressionTransformer; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.expr.ConstantExpression; @@ -67,6 +68,7 @@ import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -302,20 +304,24 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri // NOTE: we truncate the stack because IndyInterface has security issue (needs getClassLoader) // we don't do a security check just as a tradeoff, it cannot really escalate to anything. 
return AccessController.doPrivileged((PrivilegedAction) script::run); - } catch (AssertionError ae) { + } catch (final AssertionError ae) { + if (ae instanceof GroovyBugError) { + // we encountered a bug in Groovy; we wrap this so it does not go to the uncaught exception handler and tear us down + final String message = "encountered bug in Groovy while executing script [" + compiledScript.name() + "]"; + throw new ScriptException(message, ae, Collections.emptyList(), compiledScript.toString(), compiledScript.lang()); + } // Groovy asserts are not java asserts, and cannot be disabled, so we do a best-effort trying to determine if this is a // Groovy assert (in which case we wrap it and throw), or a real Java assert, in which case we rethrow it as-is, likely // resulting in the uncaughtExceptionHandler handling it. final StackTraceElement[] elements = ae.getStackTrace(); if (elements.length > 0 && "org.codehaus.groovy.runtime.InvokerHelper".equals(elements[0].getClassName())) { - logger.trace((Supplier) () -> new ParameterizedMessage("failed to run {}", compiledScript), ae); - throw new ScriptException("Error evaluating " + compiledScript.name(), - ae, emptyList(), "", compiledScript.lang()); + logger.debug((Supplier) () -> new ParameterizedMessage("failed to run {}", compiledScript), ae); + throw new ScriptException("error evaluating " + compiledScript.name(), ae, emptyList(), "", compiledScript.lang()); } throw ae; } catch (Exception | NoClassDefFoundError e) { logger.trace((Supplier) () -> new ParameterizedMessage("failed to run {}", compiledScript), e); - throw new ScriptException("Error evaluating " + compiledScript.name(), e, emptyList(), "", compiledScript.lang()); + throw new ScriptException("error evaluating " + compiledScript.name(), e, emptyList(), "", compiledScript.lang()); } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java 
b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java index 1ac31a70589..ce3f6ea3c88 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java @@ -130,6 +130,14 @@ public class GroovySecurityTests extends ESTestCase { assertFailure("def foo=false; assert foo, \"msg2\";", NoClassDefFoundError.class); } + public void testGroovyBugError() { + // this script throws a GroovyBugError because our security manager permissions prevent Groovy from accessing this private field + // and Groovy does not handle it gracefully; this test will likely start failing if the bug is fixed upstream so that a + // GroovyBugError no longer surfaces here in which case the script should be replaced with another script that intentionally + // surfaces a GroovyBugError + assertFailure("[1, 2].size", AssertionError.class); + } + /** runs a script */ private void doTest(String script) { Map vars = new HashMap(); From 9322c4fe6257e062de2378b71b12e7271ceddf4f Mon Sep 17 00:00:00 2001 From: Jeffery Bradberry Date: Sat, 19 Nov 2016 10:47:39 -0500 Subject: [PATCH 22/50] The alias is switched to point to the new index (#21512) If the index satisfies the specified conditions then a new index is created and the alias is switched to point to the new index. --- docs/reference/indices/rollover-index.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 20ffdd44b30..db78104be12 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -7,7 +7,7 @@ index is considered to be too large or too old. The API accepts a single alias name and a list of `conditions`. The alias must point to a single index only. 
If the index satisfies the specified conditions then a new index is created and the alias is switched to point to -the new alias. +the new index. [source,js] From 366241508efe199a460c71e9a168e2739b1e08ec Mon Sep 17 00:00:00 2001 From: Ludovic Dubost Date: Sat, 19 Nov 2016 16:51:05 +0100 Subject: [PATCH 23/50] Adding XWiki Elasticsearch Macro (#21505) Added link to Elasticsearch Macro with description --- docs/plugins/integrations.asciidoc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index 503f1274d81..9bf0a4c0458 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -27,7 +27,10 @@ Integrations are not plugins, but are external tools or modules that make it eas Tiki has native support for Elasticsearch. This provides faster & better search (facets, etc), along with some Natural Language Processing features (ex.: More like this) - + +* http://extensions.xwiki.org/xwiki/bin/view/Extension/Elastic+Search+Macro/[XWiki Next Generation Wiki]: + XWiki has an Elasticsearch and Kibana macro allowing to run Elasticsearch queries and display the results in XWiki pages using XWiki's scripting language as well as include Kibana Widgets in XWiki pages + [float] [[data-integrations]] === Data import/export and validation From b19c606cef37a6ecdb0b476ae73a0fa8ed5cef8a Mon Sep 17 00:00:00 2001 From: javanna Date: Sat, 19 Nov 2016 15:22:21 +0100 Subject: [PATCH 24/50] Remove minNodeVersion and corresponding public `getSmallestVersion` getter method from DiscoveryNodes --- .../cluster/node/DiscoveryNodes.java | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 895195d35b3..76c8e9a0e52 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ 
b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -56,19 +56,17 @@ public class DiscoveryNodes extends AbstractDiffable implements private final String masterNodeId; private final String localNodeId; - private final Version minNodeVersion; private final Version minNonClientNodeVersion; private DiscoveryNodes(ImmutableOpenMap nodes, ImmutableOpenMap dataNodes, ImmutableOpenMap masterNodes, ImmutableOpenMap ingestNodes, - String masterNodeId, String localNodeId, Version minNodeVersion, Version minNonClientNodeVersion) { + String masterNodeId, String localNodeId, Version minNonClientNodeVersion) { this.nodes = nodes; this.dataNodes = dataNodes; this.masterNodes = masterNodes; this.ingestNodes = ingestNodes; this.masterNodeId = masterNodeId; this.localNodeId = localNodeId; - this.minNodeVersion = minNodeVersion; this.minNonClientNodeVersion = minNonClientNodeVersion; } @@ -173,7 +171,6 @@ public class DiscoveryNodes extends AbstractDiffable implements return existing != null && existing.equals(node); } - /** * Get the id of the master node * @@ -230,16 +227,6 @@ public class DiscoveryNodes extends AbstractDiffable implements return nodesIds == null || nodesIds.length == 0 || (nodesIds.length == 1 && nodesIds[0].equals("_all")); } - - /** - * Returns the version of the node with the oldest version in the cluster - * - * @return the oldest version in the cluster - */ - public Version getSmallestVersion() { - return minNodeVersion; - } - /** * Returns the version of the node with the oldest version in the cluster that is not a client node * @@ -677,7 +664,7 @@ public class DiscoveryNodes extends AbstractDiffable implements return new DiscoveryNodes( nodes.build(), dataNodesBuilder.build(), masterNodesBuilder.build(), ingestNodesBuilder.build(), - masterNodeId, localNodeId, minNodeVersion, minNonClientNodeVersion + masterNodeId, localNodeId, minNonClientNodeVersion ); } From 596eebcf980c40f37bef1bc1b8725fcc403218c7 Mon Sep 17 00:00:00 2001 From: 
javanna Date: Sat, 19 Nov 2016 15:23:13 +0100 Subject: [PATCH 25/50] Remove unused DiscoveryNode#removeDeadMembers public method --- .../elasticsearch/cluster/node/DiscoveryNodes.java | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 76c8e9a0e52..5e1450c9198 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -38,7 +38,6 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; /** * This class holds all {@link DiscoveryNode} in the cluster and provides convenience methods to @@ -340,16 +339,6 @@ public class DiscoveryNodes extends AbstractDiffable implements } } - public DiscoveryNodes removeDeadMembers(Set newNodes, String masterNodeId) { - Builder builder = new Builder().masterNodeId(masterNodeId).localNodeId(localNodeId); - for (DiscoveryNode node : this) { - if (newNodes.contains(node.getId())) { - builder.add(node); - } - } - return builder.build(); - } - public DiscoveryNodes newNode(DiscoveryNode node) { return new Builder(this).add(node).build(); } From e0661c52627b0fa4ceeb7d6c7c8381560c169154 Mon Sep 17 00:00:00 2001 From: javanna Date: Sat, 19 Nov 2016 15:24:08 +0100 Subject: [PATCH 26/50] Remove unused DiscoveryNodes.Delta constructor --- .../java/org/elasticsearch/cluster/node/DiscoveryNodes.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 5e1450c9198..3d56e95563b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -396,10 +396,6 @@ public class 
DiscoveryNodes extends AbstractDiffable implements private final List removed; private final List added; - public Delta(String localNodeId, List removed, List added) { - this(null, null, localNodeId, removed, added); - } - public Delta(@Nullable DiscoveryNode previousMasterNode, @Nullable DiscoveryNode newMasterNode, String localNodeId, List removed, List added) { this.previousMasterNode = previousMasterNode; From 9594b6f50f8130fdbae3afd7d3201fdd45cd0969 Mon Sep 17 00:00:00 2001 From: javanna Date: Sat, 19 Nov 2016 15:25:26 +0100 Subject: [PATCH 27/50] adjust visibility of DiscoveryNodes.Delta constructor It can be private as it gets called by DiscoveryNodes#delta method, which is supposed to be the only way to create a Delta --- .../java/org/elasticsearch/cluster/node/DiscoveryNodes.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 3d56e95563b..e557b52c1d4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -396,7 +396,7 @@ public class DiscoveryNodes extends AbstractDiffable implements private final List removed; private final List added; - public Delta(@Nullable DiscoveryNode previousMasterNode, @Nullable DiscoveryNode newMasterNode, String localNodeId, + private Delta(@Nullable DiscoveryNode previousMasterNode, @Nullable DiscoveryNode newMasterNode, String localNodeId, List removed, List added) { this.previousMasterNode = previousMasterNode; this.newMasterNode = newMasterNode; From 6daeb569697bd63cf4e0299cc1aa4c683eea3d00 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 21 Nov 2016 09:20:09 +0000 Subject: [PATCH 28/50] Set execute permissions for native plugin programs (#21657) --- .../plugins/InstallPluginCommand.java | 20 ++++++++++--- .../plugins/InstallPluginCommandTests.java 
| 28 +++++++++++++++++++ 2 files changed, 44 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 43af643854e..bbf783d4e85 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -47,9 +47,12 @@ import java.net.URLConnection; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; +import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; +import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; @@ -493,15 +496,24 @@ class InstallPluginCommand extends SettingCommand { } Files.move(tmpRoot, destination, StandardCopyOption.ATOMIC_MOVE); - try (DirectoryStream stream = Files.newDirectoryStream(destination)) { - for (Path pluginFile : stream) { + Files.walkFileTree(destination, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path pluginFile, BasicFileAttributes attrs) throws IOException { if (Files.isDirectory(pluginFile)) { setFileAttributes(pluginFile, PLUGIN_DIR_PERMS); } else { - setFileAttributes(pluginFile, PLUGIN_FILES_PERMS); + // There can also be "bin" directories under the plugin directory, storing native code executables + Path parentDir = pluginFile.getParent().getFileName(); + if ("bin".equals(parentDir.toString())) { + setFileAttributes(pluginFile, BIN_FILES_PERMS); + } else { + setFileAttributes(pluginFile, PLUGIN_FILES_PERMS); + } } + return FileVisitResult.CONTINUE; } - } + }); + terminal.println("-> Installed " + info.getName()); } catch (Exception installProblem) { 
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 3bed3350f0f..08ce0083ec2 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -477,6 +477,34 @@ public class InstallPluginCommandTests extends ESTestCase { } } + public void testPlatformBinPermissions() throws Exception { + assumeTrue("posix filesystem", isPosix); + Tuple env = createEnv(fs, temp); + Path pluginDir = createPluginDir(temp); + Path platformDir = pluginDir.resolve("platform"); + Path platformNameDir = platformDir.resolve("linux-x86_64"); + Path platformBinDir = platformNameDir.resolve("bin"); + Files.createDirectories(platformBinDir); + Path programFile = Files.createFile(platformBinDir.resolve("someprogram")); + // a file created with Files.createFile() should not have execute permissions + Set sourcePerms = Files.getPosixFilePermissions(programFile); + assertFalse(sourcePerms.contains(PosixFilePermission.OWNER_EXECUTE)); + assertFalse(sourcePerms.contains(PosixFilePermission.GROUP_EXECUTE)); + assertFalse(sourcePerms.contains(PosixFilePermission.OTHERS_EXECUTE)); + String pluginZip = createPlugin("fake", pluginDir); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); + // check that the installed program has execute permissions, even though the one added to the plugin didn't + Path installedPlatformBinDir = env.v2().pluginsFile().resolve("fake").resolve("platform").resolve("linux-x86_64").resolve("bin"); + assertTrue(Files.isDirectory(installedPlatformBinDir)); + Path installedProgramFile = installedPlatformBinDir.resolve("someprogram"); + assertTrue(Files.isRegularFile(installedProgramFile)); + Set installedPerms = Files.getPosixFilePermissions(installedProgramFile); + 
assertTrue(installedPerms.contains(PosixFilePermission.OWNER_EXECUTE)); + assertTrue(installedPerms.contains(PosixFilePermission.GROUP_EXECUTE)); + assertTrue(installedPerms.contains(PosixFilePermission.OTHERS_EXECUTE)); + } + public void testConfig() throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); From 90247446aa5ec3da2d8b4760049183862ea6b40d Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 21 Nov 2016 10:29:30 +0100 Subject: [PATCH 29/50] Fix highlighting on a stored keyword field (#21645) * Fix highlighting on a stored keyword field The highlighter converts stored keyword fields using toString(). Since the keyword fields are stored as utf8 bytes the conversion is broken. This change uses BytesRef.utf8toString() to convert the field value in a valid string. Fixes #21636 * Replace BytesRef#utf8ToString with MappedFieldType#valueForDisplay --- .../subphase/highlight/PlainHighlighter.java | 8 +++++- .../highlight/HighlighterSearchIT.java | 25 +++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index e821b0fd9a8..127a008f9cc 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleHTMLFormatter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.search.highlight.TextFragment; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; @@ -106,7 +107,12 @@ public class PlainHighlighter 
implements Highlighter { textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext); for (Object textToHighlight : textsToHighlight) { - String text = textToHighlight.toString(); + String text; + if (textToHighlight instanceof BytesRef) { + text = mapper.fieldType().valueForDisplay(textToHighlight).toString(); + } else { + text = textToHighlight.toString(); + } try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) { if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index ed63ea1ea1c..8f8887bd150 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoPoint; @@ -41,7 +40,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.search.MatchQuery; -import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -106,6 +104,29 @@ public class HighlighterSearchIT extends ESIntegTestCase { return 
Collections.singletonList(InternalSettingsPlugin.class); } + public void testHighlightingWithStoredKeyword() throws IOException { + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "keyword") + .field("store", true) + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("text", "foo").endObject()) + .get(); + refresh(); + SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "foo")) + .highlighter(new HighlightBuilder().field(new Field("text"))).get(); + assertHighlight(search, 0, "text", 0, equalTo("foo")); + } + public void testHighlightingWithWildcardName() throws IOException { // test the kibana case with * as fieldname that will try highlight all fields including meta fields XContentBuilder mappings = jsonBuilder(); From 23d5293f827fe521f892a1b76a65b58751cc84c5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Mon, 21 Nov 2016 10:41:08 +0100 Subject: [PATCH 30/50] Fix integer overflows when dealing with templates. (#21628) The overflows were happening in two places, the parsing of the template that implicitly truncates the `order` when its value does not fall into the `integer` range, and the comparator that sorts templates in ascending order, since it returns `order2-order1`, which might overflow. 
Closes #21622 --- .../metadata/MetaDataCreateIndexService.java | 7 +- .../org/elasticsearch/common/Numbers.java | 55 +++++++ .../xcontent/support/XContentMapValues.java | 14 +- .../elasticsearch/common/NumbersTests.java | 146 ++++++++++++++++++ 4 files changed, 208 insertions(+), 14 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/common/NumbersTests.java diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 0471ef1c09b..c19dcdd0ecb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -474,12 +474,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { } } - CollectionUtil.timSort(templateMetadata, new Comparator() { - @Override - public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) { - return o2.order() - o1.order(); - } - }); + CollectionUtil.timSort(templateMetadata, Comparator.comparingInt(IndexTemplateMetaData::order).reversed()); return templateMetadata; } diff --git a/core/src/main/java/org/elasticsearch/common/Numbers.java b/core/src/main/java/org/elasticsearch/common/Numbers.java index 52d0337ef73..1735a0dfa65 100644 --- a/core/src/main/java/org/elasticsearch/common/Numbers.java +++ b/core/src/main/java/org/elasticsearch/common/Numbers.java @@ -21,6 +21,9 @@ package org.elasticsearch.common; import org.apache.lucene.util.BytesRef; +import java.math.BigDecimal; +import java.math.BigInteger; + /** * A set of utilities for numbers. */ @@ -178,4 +181,56 @@ public final class Numbers { } return true; } + + /** Return the long that {@code n} stores, or throws an exception if the + * stored value cannot be converted to a long that stores the exact same + * value. 
*/ + public static long toLongExact(Number n) { + if (n instanceof Byte || n instanceof Short || n instanceof Integer + || n instanceof Long) { + return n.longValue(); + } else if (n instanceof Float || n instanceof Double) { + double d = n.doubleValue(); + if (d != Math.round(d)) { + throw new IllegalArgumentException(n + " is not an integer value"); + } + return n.longValue(); + } else if (n instanceof BigDecimal) { + return ((BigDecimal) n).toBigIntegerExact().longValueExact(); + } else if (n instanceof BigInteger) { + return ((BigInteger) n).longValueExact(); + } else { + throw new IllegalArgumentException("Cannot check whether [" + n + "] of class [" + n.getClass().getName() + + "] is actually a long"); + } + } + + /** Return the int that {@code n} stores, or throws an exception if the + * stored value cannot be converted to an int that stores the exact same + * value. */ + public static int toIntExact(Number n) { + return Math.toIntExact(toLongExact(n)); + } + + /** Return the short that {@code n} stores, or throws an exception if the + * stored value cannot be converted to a short that stores the exact same + * value. */ + public static short toShortExact(Number n) { + long l = toLongExact(n); + if (l != (short) l) { + throw new ArithmeticException("short overflow: " + l); + } + return (short) l; + } + + /** Return the byte that {@code n} stores, or throws an exception if the + * stored value cannot be converted to a byte that stores the exact same + * value. 
*/ + public static byte toByteExact(Number n) { + long l = toLongExact(n); + if (l != (byte) l) { + throw new ArithmeticException("byte overflow: " + l); + } + return (byte) l; + } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index a94bf63e270..a1affb4fe57 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -24,6 +24,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Numbers; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.TimeValue; @@ -357,7 +358,7 @@ public class XContentMapValues { public static int nodeIntegerValue(Object node) { if (node instanceof Number) { - return ((Number) node).intValue(); + return Numbers.toIntExact((Number) node); } return Integer.parseInt(node.toString()); } @@ -366,10 +367,7 @@ public class XContentMapValues { if (node == null) { return defaultValue; } - if (node instanceof Number) { - return ((Number) node).intValue(); - } - return Integer.parseInt(node.toString()); + return nodeIntegerValue(node); } public static short nodeShortValue(Object node, short defaultValue) { @@ -381,7 +379,7 @@ public class XContentMapValues { public static short nodeShortValue(Object node) { if (node instanceof Number) { - return ((Number) node).shortValue(); + return Numbers.toShortExact((Number) node); } return Short.parseShort(node.toString()); } @@ -395,7 +393,7 @@ public class XContentMapValues { public static byte nodeByteValue(Object node) { if (node instanceof Number) { - return ((Number) 
node).byteValue(); + return Numbers.toByteExact((Number) node); } return Byte.parseByte(node.toString()); } @@ -409,7 +407,7 @@ public class XContentMapValues { public static long nodeLongValue(Object node) { if (node instanceof Number) { - return ((Number) node).longValue(); + return Numbers.toLongExact((Number) node); } return Long.parseLong(node.toString()); } diff --git a/core/src/test/java/org/elasticsearch/common/NumbersTests.java b/core/src/test/java/org/elasticsearch/common/NumbersTests.java new file mode 100644 index 00000000000..e5563993ad5 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/NumbersTests.java @@ -0,0 +1,146 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common; + +import org.elasticsearch.test.ESTestCase; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.concurrent.atomic.AtomicInteger; + +public class NumbersTests extends ESTestCase { + + public void testToLongExact() { + assertEquals(3L, Numbers.toLongExact(Long.valueOf(3L))); + assertEquals(3L, Numbers.toLongExact(Integer.valueOf(3))); + assertEquals(3L, Numbers.toLongExact(Short.valueOf((short) 3))); + assertEquals(3L, Numbers.toLongExact(Byte.valueOf((byte) 3))); + assertEquals(3L, Numbers.toLongExact(3d)); + assertEquals(3L, Numbers.toLongExact(3f)); + assertEquals(3L, Numbers.toLongExact(BigInteger.valueOf(3L))); + assertEquals(3L, Numbers.toLongExact(BigDecimal.valueOf(3L))); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(3.1d)); + assertEquals("3.1 is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.NaN)); + assertEquals("NaN is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.POSITIVE_INFINITY)); + assertEquals("Infinity is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(3.1f)); + assertEquals("3.1 is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(new AtomicInteger(3))); // not supported + assertEquals("Cannot check whether [3] of class [java.util.concurrent.atomic.AtomicInteger] is actually a long", e.getMessage()); + } + + public void testToIntExact() { + assertEquals(3L, Numbers.toIntExact(Long.valueOf(3L))); + assertEquals(3L, Numbers.toIntExact(Integer.valueOf(3))); + assertEquals(3L, Numbers.toIntExact(Short.valueOf((short) 3))); + assertEquals(3L, Numbers.toIntExact(Byte.valueOf((byte) 3))); + assertEquals(3L, Numbers.toIntExact(3d)); 
+ assertEquals(3L, Numbers.toIntExact(3f)); + assertEquals(3L, Numbers.toIntExact(BigInteger.valueOf(3L))); + assertEquals(3L, Numbers.toIntExact(BigDecimal.valueOf(3L))); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toIntExact(3.1d)); + assertEquals("3.1 is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.NaN)); + assertEquals("NaN is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.POSITIVE_INFINITY)); + assertEquals("Infinity is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toIntExact(3.1f)); + assertEquals("3.1 is not an integer value", e.getMessage()); + ArithmeticException ae = expectThrows(ArithmeticException.class, + () -> Numbers.toIntExact(1L << 40)); + assertEquals("integer overflow", ae.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toIntExact(new AtomicInteger(3))); // not supported + assertEquals("Cannot check whether [3] of class [java.util.concurrent.atomic.AtomicInteger] is actually a long", e.getMessage()); + } + + public void testToShortExact() { + assertEquals(3L, Numbers.toShortExact(Long.valueOf(3L))); + assertEquals(3L, Numbers.toShortExact(Integer.valueOf(3))); + assertEquals(3L, Numbers.toShortExact(Short.valueOf((short) 3))); + assertEquals(3L, Numbers.toShortExact(Byte.valueOf((byte) 3))); + assertEquals(3L, Numbers.toShortExact(3d)); + assertEquals(3L, Numbers.toShortExact(3f)); + assertEquals(3L, Numbers.toShortExact(BigInteger.valueOf(3L))); + assertEquals(3L, Numbers.toShortExact(BigDecimal.valueOf(3L))); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toShortExact(3.1d)); + assertEquals("3.1 is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> 
Numbers.toLongExact(Double.NaN)); + assertEquals("NaN is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.POSITIVE_INFINITY)); + assertEquals("Infinity is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toShortExact(3.1f)); + assertEquals("3.1 is not an integer value", e.getMessage()); + ArithmeticException ae = expectThrows(ArithmeticException.class, + () -> Numbers.toShortExact(100000)); + assertEquals("short overflow: " + 100000, ae.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toShortExact(new AtomicInteger(3))); // not supported + assertEquals("Cannot check whether [3] of class [java.util.concurrent.atomic.AtomicInteger] is actually a long", e.getMessage()); + } + + public void testToByteExact() { + assertEquals(3L, Numbers.toByteExact(Long.valueOf(3L))); + assertEquals(3L, Numbers.toByteExact(Integer.valueOf(3))); + assertEquals(3L, Numbers.toByteExact(Short.valueOf((short) 3))); + assertEquals(3L, Numbers.toByteExact(Byte.valueOf((byte) 3))); + assertEquals(3L, Numbers.toByteExact(3d)); + assertEquals(3L, Numbers.toByteExact(3f)); + assertEquals(3L, Numbers.toByteExact(BigInteger.valueOf(3L))); + assertEquals(3L, Numbers.toByteExact(BigDecimal.valueOf(3L))); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toByteExact(3.1d)); + assertEquals("3.1 is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.NaN)); + assertEquals("NaN is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toLongExact(Double.POSITIVE_INFINITY)); + assertEquals("Infinity is not an integer value", e.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toByteExact(3.1f)); + assertEquals("3.1 is not an integer value", 
e.getMessage()); + ArithmeticException ae = expectThrows(ArithmeticException.class, + () -> Numbers.toByteExact(300)); + assertEquals("byte overflow: " + 300, ae.getMessage()); + e = expectThrows(IllegalArgumentException.class, + () -> Numbers.toByteExact(new AtomicInteger(3))); // not supported + assertEquals("Cannot check whether [3] of class [java.util.concurrent.atomic.AtomicInteger] is actually a long", e.getMessage()); + } +} From d913242ca1d1b62d759d53805bd6b416ba5251a8 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 21 Nov 2016 10:47:50 +0100 Subject: [PATCH 31/50] Use a buffer to do character to byte conversion in StreamOutput#writeString (#21680) Today we call `writeByte` up to 3x per character in each string written via `StreamOutput#writeString` this can have quite some overhead when strings are long or many strings are written. This change adds a local buffer to convert chars to bytes into the local buffer. Converted bytes are then written via `writeBytes` instead reducing the overhead of this opertion. 
Closes #21660 --- .../common/io/stream/StreamInput.java | 29 +++++++++----- .../common/io/stream/StreamOutput.java | 37 +++++++++++++----- .../common/io/stream/BytesStreamsTests.java | 39 +++++++++++++++++++ 3 files changed, 87 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 794ed6f36fa..899779eee43 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -24,8 +24,10 @@ import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -323,15 +325,22 @@ public abstract class StreamInput extends InputStream { return null; } - private final CharsRefBuilder spare = new CharsRefBuilder(); + // we don't use a CharsRefBuilder since we exactly know the size of the character array up front + // this prevents calling grow for every character since we don't need this + private final CharsRef spare = new CharsRef(); public String readString() throws IOException { + // TODO it would be nice to not call readByte() for every character but we don't know how much to read up-front + // we can make the loop much more complicated but that won't buy us much compared to the bounds checks in readByte() final int charCount = readVInt(); - spare.clear(); - spare.grow(charCount); - int c; - while (spare.length() < charCount) { - c = readByte() & 0xff; + if (spare.chars.length < charCount) 
{ + // we don't use ArrayUtils.grow since there is no need to copy the array + spare.chars = new char[ArrayUtil.oversize(charCount, Character.BYTES)]; + } + spare.length = charCount; + final char[] buffer = spare.chars; + for (int i = 0; i < charCount; i++) { + final int c = readByte() & 0xff; switch (c >> 4) { case 0: case 1: @@ -341,15 +350,17 @@ public abstract class StreamInput extends InputStream { case 5: case 6: case 7: - spare.append((char) c); + buffer[i] = (char) c; break; case 12: case 13: - spare.append((char) ((c & 0x1F) << 6 | readByte() & 0x3F)); + buffer[i] = ((char) ((c & 0x1F) << 6 | readByte() & 0x3F)); break; case 14: - spare.append((char) ((c & 0x0F) << 12 | (readByte() & 0x3F) << 6 | (readByte() & 0x3F) << 0)); + buffer[i] = ((char) ((c & 0x0F) << 12 | (readByte() & 0x3F) << 6 | (readByte() & 0x3F) << 0)); break; + default: + new AssertionError("unexpected character: " + c + " hex: " + Integer.toHexString(c)); } } return spare.toString(); diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 3ba911ef9ee..788d8dfb925 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; +import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; @@ -298,23 +299,41 @@ public abstract class StreamOutput extends OutputStream { } } + // we use a small buffer to convert strings to bytes since we want to prevent calling writeByte + // for every byte in the string (see #21660 for details). 
+ // This buffer will never be the oversized limit of 1024 bytes and will not be shared across streams + private byte[] convertStringBuffer = BytesRef.EMPTY_BYTES; // TODO should we reduce it to 0 bytes once the stream is closed? + public void writeString(String str) throws IOException { - int charCount = str.length(); + final int charCount = str.length(); + final int bufferSize = Math.min(3 * charCount, 1024); // at most 3 bytes per character is needed here + if (convertStringBuffer.length < bufferSize) { // we don't use ArrayUtils.grow since copying the bytes is unnecessary + convertStringBuffer = new byte[ArrayUtil.oversize(bufferSize, Byte.BYTES)]; + } + byte[] buffer = convertStringBuffer; + int offset = 0; writeVInt(charCount); - int c; for (int i = 0; i < charCount; i++) { - c = str.charAt(i); + final int c = str.charAt(i); if (c <= 0x007F) { - writeByte((byte) c); + buffer[offset++] = ((byte) c); } else if (c > 0x07FF) { - writeByte((byte) (0xE0 | c >> 12 & 0x0F)); - writeByte((byte) (0x80 | c >> 6 & 0x3F)); - writeByte((byte) (0x80 | c >> 0 & 0x3F)); + buffer[offset++] = ((byte) (0xE0 | c >> 12 & 0x0F)); + buffer[offset++] = ((byte) (0x80 | c >> 6 & 0x3F)); + buffer[offset++] = ((byte) (0x80 | c >> 0 & 0x3F)); } else { - writeByte((byte) (0xC0 | c >> 6 & 0x1F)); - writeByte((byte) (0x80 | c >> 0 & 0x3F)); + buffer[offset++] = ((byte) (0xC0 | c >> 6 & 0x1F)); + buffer[offset++] = ((byte) (0x80 | c >> 0 & 0x3F)); + } + // make sure any possible char can fit into the buffer in any possible iteration + // we need at most 3 bytes so we flush the buffer once we have less than 3 bytes + // left before we start another iteration + if (offset > buffer.length-3) { + writeBytes(buffer, offset); + offset = 0; } } + writeBytes(buffer, offset); } public void writeFloat(float v) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index 
f51a85b2f9a..e9958c1c516 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.common.io.stream; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; +import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; @@ -657,4 +659,41 @@ public class BytesStreamsTests extends ESTestCase { IntStream.range(0, size).forEach(i -> map.put(keyGenerator.get(), valueGenerator.get())); return map; } + + public void testWriteRandomStrings() throws IOException { + final int iters = scaledRandomIntBetween(5, 20); + for (int iter = 0; iter < iters; iter++) { + List strings = new ArrayList<>(); + int numStrings = randomIntBetween(100, 1000); + BytesStreamOutput output = new BytesStreamOutput(0); + for (int i = 0; i < numStrings; i++) { + String s = randomRealisticUnicodeOfLengthBetween(0, 2048); + strings.add(s); + output.writeString(s); + } + + try (StreamInput streamInput = output.bytes().streamInput()) { + for (int i = 0; i < numStrings; i++) { + String s = streamInput.readString(); + assertEquals(strings.get(i), s); + } + } + } + } + + /* + * tests the extreme case where characters use more than 2 bytes + */ + public void testWriteLargeSurrogateOnlyString() throws IOException { + String deseretLetter = "\uD801\uDC00"; + assertEquals(2, deseretLetter.length()); + String largeString = IntStream.range(0, 2048).mapToObj(s -> deseretLetter).collect(Collectors.joining("")).trim(); + assertEquals("expands to 4 bytes", 4, new BytesRef(deseretLetter).length); + try (BytesStreamOutput output = new BytesStreamOutput(0)) { + output.writeString(largeString); + try (StreamInput streamInput = output.bytes().streamInput()) { + assertEquals(largeString, 
streamInput.readString()); + } + } + } } From a1d88e65509a32e074fe49e8fa894e6c58ebdfee Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 21 Nov 2016 11:36:56 +0100 Subject: [PATCH 32/50] Rename ClusterState#lookupPrototypeSafe to `lookupPrototype` and remove previous "unsafe" unused variant (#21686) The `lookupPrototype` method is not used anywhere. Seems like we rather use its `lookupProrotypeSafe` variant (which also throws exception if the prototype is not found) is always. This commit makes the safer variant the default one, by renaming it to "lookupPrototype" and removes the previous "unsafe" variant. --- .../elasticsearch/cluster/ClusterState.java | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index 7699e6fff87..c842c57daec 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -108,13 +108,7 @@ public class ClusterState implements ToXContent, Diffable { registerPrototype(RestoreInProgress.TYPE, RestoreInProgress.PROTO); } - @Nullable public static T lookupPrototype(String type) { - //noinspection unchecked - return (T) customPrototypes.get(type); - } - - public static T lookupPrototypeSafe(String type) { @SuppressWarnings("unchecked") T proto = (T) customPrototypes.get(type); if (proto == null) { @@ -308,7 +302,7 @@ public class ClusterState implements ToXContent, Diffable { private final String value; - private Metric(String value) { + Metric(String value) { this.value = value; } @@ -630,10 +624,6 @@ public class ClusterState implements ToXContent, Diffable { return this; } - public Custom getCustom(String type) { - return customs.get(type); - } - public Builder putCustom(String type, Custom custom) { customs.put(type, custom); return this; @@ -707,7 +697,7 @@ public class ClusterState implements 
ToXContent, Diffable { int customSize = in.readVInt(); for (int i = 0; i < customSize; i++) { String type = in.readString(); - Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in); + Custom customIndexMetaData = lookupPrototype(type).readFrom(in); builder.putCustom(type, customIndexMetaData); } return builder.build(); @@ -779,12 +769,12 @@ public class ClusterState implements ToXContent, Diffable { new DiffableUtils.DiffableValueSerializer() { @Override public Custom read(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readFrom(in); + return lookupPrototype(key).readFrom(in); } @Override public Diff readDiff(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readDiffFrom(in); + return lookupPrototype(key).readDiffFrom(in); } }); } From 6122b84ebaefff5c50ac8d35650877df6b41b943 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 21 Nov 2016 11:46:26 +0100 Subject: [PATCH 33/50] remove pointless catch exception in TransportSearchAction (#21689) TransportSearchAction optimizes the search_type in certain cases, when for instance we are searching against a single shard, or when there is only a suggest section in the request. That optimization is wrapped in a try catch, and when an exception happens we log it and ignore it. This may be a leftover from the past though, as no exception is expected to be thrown in that code block, hence if there is any exception we are probably better off bubbling it up rather than ignoring it. 
--- .../action/search/TransportSearchAction.java | 34 +++++++------------ 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 1b818d86eac..53ca5fb84b3 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -33,8 +33,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.AliasFilter; @@ -108,26 +106,20 @@ public class TransportSearchAction extends HandledTransportAction Date: Mon, 21 Nov 2016 11:47:48 +0100 Subject: [PATCH 34/50] Docs: Added offline install link for discovery-file plugin Closes #21696 --- docs/plugins/discovery-file.asciidoc | 30 ++++++++++++++++------------ 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/docs/plugins/discovery-file.asciidoc b/docs/plugins/discovery-file.asciidoc index 15175620d52..ec501a08a8a 100644 --- a/docs/plugins/discovery-file.asciidoc +++ b/docs/plugins/discovery-file.asciidoc @@ -1,7 +1,7 @@ [[discovery-file]] === File-Based Discovery Plugin -The file-based discovery plugin uses a list of hosts/ports in a `unicast_hosts.txt` file +The file-based discovery plugin uses a list of hosts/ports in a `unicast_hosts.txt` file in the `config/discovery-file` directory for unicast discovery. [[discovery-file-install]] @@ -20,6 +20,10 @@ be restarted after installation. 
Note that installing the plugin will add a `discovery-file` directory to the `config` folder, and a default `unicast_hosts.txt` file that must be edited with the correct unicast hosts list before starting the node. +This plugin can be downloaded for <> from +{plugin_url}/discovery-file/discovery-file-{version}.zip. + + [[discovery-file-remove]] [float] ==== Removal @@ -37,7 +41,7 @@ The node must be stopped before removing the plugin. [float] ==== Using the file-based discovery plugin -The file-based discovery plugin provides the ability to specify the +The file-based discovery plugin provides the ability to specify the unicast hosts list through a simple `unicast_hosts.txt` file that can be dynamically updated at any time. To enable, add the following in `elasticsearch.yml`: @@ -47,12 +51,12 @@ discovery.zen.hosts_provider: file ---- This plugin simply provides a facility to supply the unicast hosts list for -zen discovery through an external file that can be updated at any time by a side process. +zen discovery through an external file that can be updated at any time by a side process. -For example, this gives a convenient mechanism for an Elasticsearch instance -that is run in docker containers to be dynamically supplied a list of IP -addresses to connect to for zen discovery when those IP addresses may not be -known at node startup. +For example, this gives a convenient mechanism for an Elasticsearch instance +that is run in docker containers to be dynamically supplied a list of IP +addresses to connect to for zen discovery when those IP addresses may not be +known at node startup. Note that the file-based discovery plugin is meant to augment the unicast hosts list in `elasticsearch.yml` (if specified), not replace it. Therefore, @@ -73,11 +77,11 @@ cannot start in the middle of a line). [float] ==== unicast_hosts.txt file format -The format of the file is to specify one unicast host entry per line. 
+The format of the file is to specify one unicast host entry per line. Each unicast host entry consists of the host (host name or IP address) and -an optional transport port number. If the port number is specified, is must -come immediately after the host (on the same line) separated by a `:`. -If the port number is not specified, a default value of 9300 is used. +an optional transport port number. If the port number is specified, is must +come immediately after the host (on the same line) separated by a `:`. +If the port number is not specified, a default value of 9300 is used. For example, this is an example of `unicast_hosts.txt` for a cluster with four nodes that participate in unicast discovery, some of which are not @@ -92,6 +96,6 @@ running on the default port: [2001:0db8:85a3:0000:0000:8a2e:0370:7334]:9301 ---------------------------------------------------------------- -Host names are allowed instead of IP addresses (similar to +Host names are allowed instead of IP addresses (similar to `discovery.zen.ping.unicast.hosts`), and IPv6 addresses must be -specified in brackets with the port coming after the brackets. +specified in brackets with the port coming after the brackets. From e7b9e65fc3b06e784008a4dd14a8ba305ac30374 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 21 Nov 2016 12:36:44 +0100 Subject: [PATCH 35/50] Add checkstyle rule to forbid empty javadoc comments (#20881) This commit adds a RegexpMultiline check to checkstyle that yells when an empty Javadoc comment is found in Java files. 
Related #20871 --- buildSrc/src/main/resources/checkstyle.xml | 7 +++++++ .../org/elasticsearch/search/nested/SimpleNestedIT.java | 3 --- .../test/rest/yaml/ESClientYamlSuiteTestCase.java | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle.xml b/buildSrc/src/main/resources/checkstyle.xml index 706ef46ffa1..891a85d50a9 100644 --- a/buildSrc/src/main/resources/checkstyle.xml +++ b/buildSrc/src/main/resources/checkstyle.xml @@ -10,6 +10,13 @@ + + + + + + +